Ejemplo n.º 1
0
 def test_default_value_load_mutable(self):
     """Defaults load on a mutable Database and can then be overwritten."""
     configure_main(with_config_logging=False)
     db_conf = model.Database(immutable=False)
     self.assertEqual(db_conf.addr, '127.0.0.1')
     self.assertEqual(db_conf.port, 3306)
     db_conf.addr = '192.168.0.1'
     db_conf.port = 8888
     self.assertEqual(db_conf.addr, '192.168.0.1')
     self.assertEqual(db_conf.port, 8888)
Ejemplo n.º 2
0
    def run(self):
        """Top-level application run: set up session state, loop, persist.

        Opens the 'hieroch.db' database and the console UI, initializes
        per-session state (no store selected, today's date, the 'offline'
        origin), runs the interactive loop, then saves the database.
        """
        self.db = model.Database('hieroch.db')
        self.cio = cio.cio()
        self.cio.print_status(0, "Hieroch.")

        # Per-session state consumed by the interactive loop's handlers.
        self.store_id = None
        self.today = datetime.date.today()
        self.origin_no = model.origin_by_name('offline')

        self.loop()

        # Persist everything entered during the session.
        self.db.save()
Ejemplo n.º 3
0
    def assert_main_output(self, threads, expected=None, db=None,
                           client=MockedClient):
        """Run make_db.main and verify the builds recorded in the database.

        Returns the database so callers can chain further checks.
        """
        if expected is None:
            expected = self.get_expected_builds()
        if db is None:
            db = model.Database(':memory:')
        make_db.main(db, {self.JOBS_DIR: {}}, threads, True, client)

        actual = {}
        for _rowid, build_path, started, finished in db.get_builds():
            actual[build_path] = (
                started, finished, db.test_results_for_build(build_path))

        self.assertEqual(actual, expected)
        return db
Ejemplo n.º 4
0
 def test_file_path_value_load_with_two_files(self):
     """Loading test.ini plus test2.ini populates Server and Database values."""
     configure_main(config_path=['test.ini', 'test2.ini'], config_arg=None)
     cfg = model.Server()
     self.assertEqual(cfg.addr, '127.0.0.1')
     self.assertEqual(cfg.port, 80)
     self.assertEqual(cfg.udp, True)
     self.assertEqual(cfg.val_f, 0.5)
     self.assertEqual(cfg.val_d, {'a': 10, 'b': 20, 'c': 30})
     self.assertEqual(cfg.val_l, [1, 2, 3, 4, 5, 6])
     self.assertEqual(cfg.home, Path('../../..'))
     with self.assertRaises(AttributeError):
         self.conf.addr = '1.2.3.4'  # not editable
     cfg = model.Database(immutable=True)
     self.assertEqual(cfg.addr, '192.168.0.1')
     self.assertEqual(cfg.port, 3333)
Ejemplo n.º 5
0
 def test_ifconf_config_path(self):
     """A single combined config file (test_all.ini) populates all sections."""
     configure_main(config_path='test_all.ini', config_arg=None)
     cfg = model.Server()
     self.assertEqual(cfg.addr, '127.0.0.1')
     self.assertEqual(cfg.port, 80)
     self.assertEqual(cfg.udp, True)
     self.assertEqual(cfg.val_f, 0.5)
     self.assertEqual(cfg.val_d, {'a': 10, 'b': 20, 'c': 30})
     self.assertEqual(cfg.val_l, [9, 9, 9])
     self.assertEqual(cfg.home, Path('../../..'))
     with self.assertRaises(AttributeError):
         self.conf.addr = '1.2.3.4'  # not editable
     cfg = model.Database(immutable=True)
     self.assertEqual(cfg.addr, '192.168.0.1')
     self.assertEqual(cfg.port, 3333)
Ejemplo n.º 6
0
def main():
    """Scrape Facebook group/event data (unless --results-only), then parse.

    Side effects: opens a browser session via selenium helpers, writes
    scraped data under args.folder, and prints progress to stdout.
    """
    args = get_args()
    db = model.Database(args.database)

    if not args.results_only:
        # NOTE(review): the original line was mangled by a credential
        # scrubber ('print("\nFacebook login:"******"")' is not valid
        # Python); restored to a plain prompt.
        print("\nFacebook login:")
        driver = lib.create_driver(not args.no_headless)
        try:
            lib.login_facebook(driver)
            get_groups.get_groups(driver)
            get_events.get_all_events(driver, args.debug)
            get_times.get_all_times(driver, args.debug)
        finally:
            # Always release the browser, even if scraping fails.
            driver.close()

    parse_results.parse_results(args.folder)
    print("\nDone...", flush=True)
Ejemplo n.º 7
0
#!/usr/bin/python3

import datetime
import functools
import itertools
import sys

from bottle import route, run, view, static_file, redirect, request, response

import model

# Re-export of model.max_votes for module-local use.
max_votes = model.max_votes
# Database path comes from the first CLI argument, defaulting to 'db.db'.
db = model.Database(sys.argv[1] if len(sys.argv) > 1 else 'db.db')
# Global voting state exposed by /admin; presumably flipped elsewhere
# once voting opens — confirm against the rest of the file.
started = False


def auth(method):
    """Decorator: inject the 'user' cookie into the handler as a kwarg.

    Wrapped handlers receive ``user=<cookie value>``; visitors without
    the cookie are redirected to /who instead of running the handler.
    """
    # functools.wraps preserves the handler's __name__/__doc__, which the
    # bare wrapper previously clobbered (confusing bottle introspection
    # and debugging output).
    @functools.wraps(method)
    def wrapper(*args, **kwargs):
        user = request.get_cookie("user")
        if user:
            kwargs['user'] = user
            return method(*args, **kwargs)
        else:
            redirect('/who')

    return wrapper


@route('/admin')
@view('admin')
def admin():
    """Admin page: expose whether voting has started to the template."""
    return {'started': started}

Ejemplo n.º 8
0
def main():
    """Program entry point: open the database, run the menu, disconnect."""
    db = model.Database()
    main_menu(db)
    db.disconnect()
Ejemplo n.º 9
0
    def test_main(self):
        """Integration test: run stream.main against fully faked pub/sub,
        BigQuery, and GCS interfaces, then assert on the exact traces."""
        # It's easier to run a full integration test with stubbed-out
        # external interfaces and validate the trace than it is to test
        # each individual piece.
        # The components are mostly tested in make_*_test.py.

        db = model.Database(':memory:')
        # Three events: finished.json for build 123 twice ('b', 'c') and
        # started.json for build 124 ('d'); empty responses drain each poll.
        fake_sub = FakeSub([
            FakePullResponse([
                FakeReceivedMessage(
                    'b',
                    FakePubSubMessage(
                        'no_data', {
                            'eventType': 'OBJECT_FINALIZE',
                            'objectId': 'logs/fake/123/finished.json',
                            'bucketId': 'kubernetes-jenkins'
                        }))
            ]),
            FakePullResponse([]),
            FakePullResponse([
                FakeReceivedMessage(
                    'c',
                    FakePubSubMessage(
                        'no_data', {
                            'eventType': 'OBJECT_FINALIZE',
                            'objectId': 'logs/fake/123/finished.json',
                            'bucketId': 'kubernetes-jenkins'
                        }))
            ]),
            FakePullResponse([]),
            FakePullResponse([
                FakeReceivedMessage(
                    'd',
                    FakePubSubMessage(
                        'no_data', {
                            'eventType': 'OBJECT_FINALIZE',
                            'objectId': 'logs/fake/124/started.json',
                            'bucketId': 'kubernetes-jenkins'
                        }))
            ]),
            FakePullResponse([]),
        ])
        fake_client = FakeClient()
        fake_table = FakeTable('day', stream.load_schema(FakeSchemaField))
        fake_sub_path = 'projects/{project_id}/subscriptions/{sub}'
        tables = {'day': (fake_table, 'incr')}
        # [1, 0, 0, 0].pop returns 0, 0, 0, then 1 on successive calls —
        # presumably the loop's stop signal; confirm against stream.main.
        stream.main(db, fake_sub, fake_sub_path, fake_client, tables,
                    self.fake_buckets, make_db_test.MockedClient, [1, 0, 0,
                                                                   0].pop)

        # uncomment if the trace changes
        # import pprint; pprint.pprint(fake_sub.trace)
        # import pprint; pprint.pprint(fake_client.trace)
        # self.maxDiff = 3000

        now = make_db_test.MockedClient.NOW

        # Exact sequence of subscription operations performed by stream.main.
        self.assertEqual(
            fake_sub.trace,
            [['pull', fake_sub_path, False], ['pull', fake_sub_path, True],
             ['modify-ack', fake_sub_path, ['b'], 180],
             ['ack', fake_sub_path, ['b']], ['pull', fake_sub_path, False],
             ['pull', fake_sub_path, True],
             ['modify-ack', fake_sub_path, ['c'], 180],
             ['ack', fake_sub_path, ['c']], ['pull', fake_sub_path, False],
             ['pull', fake_sub_path, True], ['ack', fake_sub_path, ['d']]])

        # Build 123 is uploaded to BigQuery exactly once, despite the
        # duplicate finished.json notification.
        self.assertEqual(fake_client.trace, [[
            'insert-rows',
            ([{
                'elapsed':
                5,
                'finished':
                now,
                'job':
                'fake',
                'number':
                123,
                'passed':
                True,
                'path':
                'gs://kubernetes-jenkins/logs/fake/123',
                'result':
                'SUCCESS',
                'started':
                now - 5,
                'test': [{
                    'name': 'Foo',
                    'time': 3.0
                }, {
                    'failed': True,
                    'failure_text': 'stacktrace',
                    'name': 'Bad',
                    'time': 4.0
                }],
                'tests_failed':
                1,
                'tests_run':
                2
            }], ), {
                'skip_invalid_rows': True
            }
        ]])
Ejemplo n.º 10
0
        required=True,
    )
    parser.add_argument(
        '--threads',
        help='number of concurrent threads to download results with',
        default=32,
        type=int,
    )
    parser.add_argument('--junit',
                        action='store_true',
                        help='Download JUnit results from each build')
    parser.add_argument(
        '--buildlimit',
        help='maximum number of runs within each job to pull, \
         all jobs will be collected if unset or 0',
        default=sys.maxsize,
        type=int,
    )
    return parser.parse_args(argv)


if __name__ == '__main__':
    OPTIONS = get_options(sys.argv[1:])
    # Load the bucket configuration up front; 'with' closes the file
    # deterministically (the original leaked the handle from open()).
    with open(OPTIONS.buckets) as buckets_file:
        BUCKETS = yaml.safe_load(buckets_file)
    main(
        model.Database(),
        BUCKETS,
        OPTIONS.threads,
        OPTIONS.junit,
        OPTIONS.buildlimit,
    )
Ejemplo n.º 11
0
            bucket += '/'
        get_builds(db, bucket, metadata, threads, client_class)
    if get_junit:
        download_junit(db, threads, client_class)


def get_options(argv):
    """Parse the downloader's command line flags.

    Args:
        argv: raw argument list, excluding the program name.

    Returns:
        argparse.Namespace with ``buckets``, ``threads`` and ``junit``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--buckets',
                        required=True,
                        help='YAML file with GCS bucket locations')
    parser.add_argument('--threads',
                        type=int,
                        default=32,
                        help='number of concurrent threads to download results with')
    parser.add_argument('--junit',
                        action='store_true',
                        help='Download JUnit results from each build')
    return parser.parse_args(argv)


if __name__ == '__main__':
    OPTIONS = get_options(sys.argv[1:])
    # yaml.safe_load: bare yaml.load on an external file allows arbitrary
    # object construction and requires an explicit Loader on PyYAML >= 5.1
    # (other variants of this script already use safe_load). The 'with'
    # also closes the previously-leaked file handle.
    with open(OPTIONS.buckets) as buckets_file:
        BUCKETS = yaml.safe_load(buckets_file)
    main(model.Database(), BUCKETS, OPTIONS.threads, OPTIONS.junit)
Ejemplo n.º 12
0
 def setUp(self):
     """Give each test a fresh in-memory database (':memory:' path)."""
     self.db = model.Database(':memory:')
Ejemplo n.º 13
0
 def test_file_path_value_load_test2(self):
     """test2.ini alone supplies the immutable Database section values."""
     configure_main(config_path='test2.ini', config_arg=None)
     cfg = model.Database(immutable=True)
     self.assertEqual(cfg.addr, '192.168.0.1')
     self.assertEqual(cfg.port, 3333)
Ejemplo n.º 14
0
            bucket += '/'
        get_builds(db, bucket, metadata, threads, client_class)
    if get_junit:
        download_junit(db, threads, client_class)


def get_options(argv):
    """Parse command-line flags for the build downloader."""
    parser = argparse.ArgumentParser()
    flag_specs = (
        ('--buckets', dict(help='YAML file with GCS bucket locations',
                           required=True)),
        ('--threads', dict(help='number of concurrent threads to download results with',
                           default=32, type=int)),
        ('--junit', dict(action='store_true',
                         help='Download JUnit results from each build')),
    )
    for flag, kwargs in flag_specs:
        parser.add_argument(flag, **kwargs)
    return parser.parse_args(argv)


if __name__ == '__main__':
    OPTIONS = get_options(sys.argv[1:])
    # 'with' closes the config file deterministically instead of leaking
    # the handle returned by the inline open().
    with open(OPTIONS.buckets) as buckets_file:
        BUCKETS = yaml.safe_load(buckets_file)
    main(model.Database(), BUCKETS, OPTIONS.threads, OPTIONS.junit)
Ejemplo n.º 15
0
    def get_reactors(self):
        """Scrape reactor data per country and attach station coordinates.

        Pulls the per-country reactor tables from the IAEA PRIS site,
        then joins in latitude/longitude scraped from Wikipedia's list
        of nuclear power stations (matched by station name substring).

        Returns:
            A list of rea.Reactor objects; lat/long are None when no
            Wikipedia station name matches a reactor's name.
        """
        db = model.Database()
        countries = db.get_country_codes()
        # country key (name without spaces) -> PRIS reactor table rows
        country_tables = {}
        reactors = []

        # Wikipedia's power-station table -> list of record dicts.
        coor_url = 'https://en.wikipedia.org/wiki/List_of_nuclear_power_stations'
        coor_soup = BeautifulSoup(requests.get(coor_url).content, 'lxml')
        coor_df = pd.read_html(str(coor_soup.find_all('table')), header=0)
        stations = json.loads(coor_df[1].to_json(orient='records'))

        # One PRIS page per country; table 3 holds the reactor listing.
        for country in countries:
            if country[0] is None:
                continue
            key = country[0].replace(" ", "")
            url = ('https://pris.iaea.org/PRIS/CountryStatistics/'
                   'CountryDetails.aspx?current=' + key)
            soup = BeautifulSoup(requests.get(url).content, 'lxml')
            df = pd.read_html(str(soup.find_all('table')), header=0)
            country_tables[key] = json.loads(df[3].to_json(orient='index'))

        print(stations)

        for country in countries:
            if country[0] is None:
                continue
            key = country[0].replace(" ", "")
            rows = country_tables[key]
            for j in range(len(rows)):
                row = rows[str(j)]
                # Attach the country id from the countries table.
                row['ID'] = country[1]
                name = row['Name']
                # Match the Wikipedia station by name; no match -> no coords.
                lat = None
                long = None
                for station in stations:
                    if station['Power station'].upper() in name.upper():
                        # Location looks like '.../ 51.389N 3.717E ...'.
                        tokens = station['Location'].split("/")[1].split(" ")
                        lat = self._signed_degrees(tokens[1], "S")
                        long = self._signed_degrees(tokens[2], "W")
                        print(lat, long)
                        break
                reactors.append(
                    rea.Reactor(name, row['Type'], row['Status'],
                                row['Location'],
                                row['Reference Unit Power  [MW]'],
                                row['Gross Electrical Capacity [MW]'],
                                row['First Grid Connection'], row['ID'],
                                lat, long))
        return reactors

    @staticmethod
    def _signed_degrees(token, negative_marker):
        """Return the decimal-degree value in *token* (e.g. '51.389N'),
        negated when the hemisphere letter *negative_marker* ('S' or 'W')
        appears in the token."""
        # Raw string: the original "\." relied on a deprecated escape.
        value = float(re.findall(r"[0-9]*\.[0-9]*", token)[0])
        return -value if negative_marker in token else value
Ejemplo n.º 16
0
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--poll',
        required=True,
        help='Follow GCS changes from project/topic/subscription',
    )
    parser.add_argument(
        '--dataset',
        help='BigQuery dataset (e.g. k8s-gubernator:build)'
    )
    parser.add_argument(
        '--tables',
        nargs='+',
        default=[],
        help='Upload rows to table:days [e.g. --tables day:1 week:7 all:0]',
    )
    parser.add_argument(
        '--stop_at',
        type=int,
        help='Terminate when this hour (0-23) rolls around (in local time).'
    )
    return parser.parse_args(argv)


if __name__ == '__main__':
    OPTIONS = get_options(sys.argv[1:])
    # load_sub(...) and load_tables(...) each return an iterable whose
    # elements are splatted into main()'s positional parameters; stop is
    # a predicate built from the --stop_at hour.
    main(model.Database(),
         *load_sub(OPTIONS.poll),
         *load_tables(OPTIONS.dataset, OPTIONS.tables),
         stop=StopWhen(OPTIONS.stop_at))
Ejemplo n.º 17
0
def get_recipe():
    """Return one recipe as a JSON response.

    NOTE(review): ``recipe_id`` is a free variable here — it must be
    supplied at module scope (or by the routing framework) for this to
    work; confirm it should not be a function parameter.
    """
    with model.Database(DB) as rdb:
        result = rdb.select_recipe(recipe_id)
        # Columns 0-4 of the row map positionally onto the Recipe constructor.
        recipe = model.Recipe(result[0], result[1], result[2], result[3], result[4])
    return jsonify(recipe.dictionary)
Ejemplo n.º 18
0
 def test_file_path_value_load_test3_override(self):
     """Loading test3.ini together with test2.ini yields the override values."""
     configure_main(config_path=['test3.ini', 'test2.ini'], config_arg=None)
     cfg = model.Database(immutable=True)
     self.assertEqual(cfg.addr, '192.168.0.100')
     self.assertEqual(cfg.port, 4444)
Ejemplo n.º 19
0
Archivo: main.py Proyecto: Rlllok/Home
def main():
    """Entry point: create the application database and run the main menu."""
    main_menu(model.Database())
Ejemplo n.º 20
0
            bucket += '/'
        get_builds(db, bucket, metadata, threads, client_class)
    if get_junit:
        download_junit(db, threads, client_class)


def get_options(argv):
    """Build the CLI parser and parse *argv* into a namespace."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--buckets', required=True,
                            help='YAML file with GCS bucket locations')
    arg_parser.add_argument('--threads', default=32, type=int,
                            help='number of concurrent threads to download results with')
    arg_parser.add_argument('--junit', action='store_true',
                            help='Download JUnit results from each build')
    return arg_parser.parse_args(argv)


if __name__ == '__main__':
    OPTIONS = get_options(sys.argv[1:])
    # yaml.safe_load: bare yaml.load is unsafe on external files and
    # requires an explicit Loader on PyYAML >= 5.1 (sibling variants of
    # this script already use safe_load); 'with' closes the file handle.
    with open(OPTIONS.buckets) as buckets_file:
        BUCKETS = yaml.safe_load(buckets_file)
    main(model.Database('build.db'), BUCKETS, OPTIONS.threads, OPTIONS.junit)
Ejemplo n.º 21
0
import model

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 20 * 1024 * 1024  # cap uploads at 20 MiB
limiter = Limiter(
    app,
    key_func=get_remote_address,  # count requests per visitor IP
    default_limits=["200 per day", "50 per hour"]  # default rate limits: at most 200 requests/day, 50/hour
)
UPLOAD_PATH = 'files'  # where uploaded papers are stored
minganlist = ['peace', 'and', 'love']  # sensitive-word list; presumably used for content filtering — confirm
db = model.Database()


@app.route('/')
def method_name():
    """Home page: render the homepage template with the hot papers."""
    articles = db.get_hot_paper()
    return render_template('homepage.html', articles=articles)


@app.route('/articles/<id>')
def article_detail(id):
    """Article page: record the visit, then render the paper and comments."""
    db.visit_paper(request.remote_addr, id)
    paper = db.get_paper(id)
    comments = db.get_comments(id)
    return render_template('article_detail.html', articles=paper,
                           comments=comments)
Ejemplo n.º 22
0
    def test_main(self):
        """Integration test of stream.main with a fake subscription/table,
        asserting on the combined pull/ack/insert trace."""
        # It's easier to run a full integration test with stubbed-out
        # external interfaces and validate the trace than it is to test
        # each individual piece.
        # The components are mostly tested in make_*_test.py.

        db = model.Database(':memory:')
        # Event stream: an OBJECT_DELETE ('a', acked without processing),
        # finished.json for build 123 twice ('b', 'c'), and started.json
        # for build 124 ('d'); empty lists drain each poll cycle.
        fakesub = FakeSub([
            [
                ('a', Attrs({'eventType': 'OBJECT_DELETE'})),
            ],
            [
                ('b',
                 Attrs({
                     'eventType': 'OBJECT_FINALIZE',
                     'objectId': 'logs/fake/123/finished.json',
                     'bucketId': 'kubernetes-jenkins'
                 })),
            ],
            [],
            [
                ('c',
                 Attrs({
                     'eventType': 'OBJECT_FINALIZE',
                     'objectId': 'logs/fake/123/finished.json',
                     'bucketId': 'kubernetes-jenkins'
                 })),
            ],
            [],
            [
                ('d',
                 Attrs({
                     'eventType': 'OBJECT_FINALIZE',
                     'objectId': 'logs/fake/124/started.json'
                 })),
            ],
            [],
        ])
        # The fake table records its inserts into the same trace list as
        # the fake subscription, so ordering is checked across both.
        faketable = FakeTable('day', stream.load_schema(FakeSchemaField),
                              fakesub.trace)
        tables = {'day': (faketable, 'incr')}
        # [1, 0, 0, 0].pop returns 0, 0, 0, then 1 on successive calls —
        # presumably the loop's stop signal; confirm against stream.main.
        stream.main(db, fakesub, tables, make_db_test.MockedClient,
                    [1, 0, 0, 0].pop)

        # uncomment if the trace changes
        # import pprint; pprint.pprint(fakesub.trace)
        # self.maxDiff = 3000

        now = make_db_test.MockedClient.NOW

        # Pulls/acks interleaved with a single insert-data for build 123,
        # despite the duplicate finished.json notification.
        self.assertEqual(
            fakesub.trace,
            [['pull', False], ['pull', True], ['pull', True], ['ack', ['a']],
             ['modify-ack', ['b'], 180], ['ack', ['b']],
             [
                 'insert-data',
                 ([[
                     5, now - 5, now, True, u'SUCCESS', None,
                     u'gs://kubernetes-jenkins/logs/fake/123', u'fake', 123,
                     [],
                     [{
                         'name': 'Foo',
                         'time': 3.0
                     }, {
                         'failed': True,
                         'failure_text': 'stacktrace',
                         'name': 'Bad',
                         'time': 4.0
                     }], 2, 1, None
                 ]], [1]), {
                     'skip_invalid_rows': True
                 }
             ], ['pull', False], ['pull', True], ['modify-ack', ['c'], 180],
             ['ack', ['c']], ['pull', False], ['pull', True], ['ack', ['d']]])
Ejemplo n.º 23
0
        db.reset_emitted(incremental_table)

    builds = db.get_builds(min_started=min_started,
                           incremental_table=incremental_table)

    rows_emitted = set()
    for rowid, path, started, finished in builds:
        try:
            results = db.test_results_for_build(path)
            row = row_for_build(path, started, finished, results)
            json.dump(row, outfile, sort_keys=True)
            outfile.write('\n')
            rows_emitted.add(rowid)
        except IOError:
            return
        except:
            logging.exception('error on %s', path)

    if rows_emitted:
        gen = db.insert_emitted(rows_emitted,
                                incremental_table=incremental_table)
        print >> sys.stderr, 'incremental progress gen #%d' % gen
    else:
        print >> sys.stderr, 'no rows emitted'


if __name__ == '__main__':
    # NOTE(review): the function above uses Python 2 syntax
    # (``print >> sys.stderr``); this entry point only runs under Python 2.
    db = model.Database('build.db')
    opts = parse_args(sys.argv[1:])
    main(db, opts, sys.stdout)
Ejemplo n.º 24
0
    parser.add_argument(
        '--poll',
        required=True,
        help='Follow GCS changes from project/topic/subscription',
    )
    parser.add_argument(
        '--dataset',
        help='BigQuery dataset (e.g. k8s-gubernator:build)'
    )
    parser.add_argument(
        '--tables',
        nargs='+',
        default=[],
        help='Upload rows to table:days [e.g. --tables day:1 week:7 all:0]',
    )
    parser.add_argument(
        '--stop_at',
        type=int,
        help='Terminate when this hour (0-23) rolls around (in local time).'
    )
    return parser.parse_args(argv)


if __name__ == '__main__':
    OPTIONS = get_options(sys.argv[1:])

    # NOTE(review): load_sub(...) and load_tables(...) are passed as single
    # positional arguments here, while another variant of this script in
    # this collection unpacks them with ``*``; confirm that this main()'s
    # signature expects the un-splatted values.
    main(model.Database('build.db'),
         load_sub(OPTIONS.poll),
         load_tables(OPTIONS.dataset, OPTIONS.tables),
         stop=StopWhen(OPTIONS.stop_at))
Ejemplo n.º 25
0
 def __init__(self):
     """Build the Tk root window, create view and model, and wire buttons."""
     self.root = tkinter.Tk()  # main window of application
     self.app_view = view.AppView(self.root)  # creates app's view
     self.app_model = model.Database()  # creates app's model
     self.app_view.insert_button.config(command=self.add_record)  # bind method to button
     self.app_view.delete_button.config(command=self.delete_record)  # bind method to button
Ejemplo n.º 26
0
        db.reset_emitted(incremental_table)
        builds = list(db.get_builds_from_paths(opts.paths, incremental_table))
    else:
        builds = db.get_builds(min_started=min_started,
                               incremental_table=incremental_table)

    rows_emitted = set()
    for rowid, row in make_rows(db, builds):
        size = json_size(row)
        if size > MAX_ROW_SIZE:
            print('row for %s exceeds maximum for bigquery %d > %d' %
                  (row['path'], size, MAX_ROW_SIZE))
            continue
        json.dump(row, outfile, sort_keys=True)
        outfile.write('\n')
        rows_emitted.add(rowid)

    if rows_emitted:
        gen = db.insert_emitted(rows_emitted,
                                incremental_table=incremental_table)
        print('incremental progress gen #%d' % gen, file=sys.stderr)
    else:
        print('no rows emitted', file=sys.stderr)
    return 0


if __name__ == '__main__':
    DB = model.Database()  # default-constructed; path chosen inside model.Database — confirm
    OPTIONS = parse_args(sys.argv[1:])
    # main() returns 0 on success (see above); propagate it as the
    # process exit status for shell callers.
    sys.exit(main(DB, OPTIONS, sys.stdout))
Ejemplo n.º 27
0
import os
import re
import psycopg2
import model
from flask import Flask, request, jsonify
from flask_cors import CORS

app = Flask(__name__)
CORS(app)  # allow cross-origin requests to the API

# Connection string must be provided by the environment (KeyError if unset).
DATABASE_URL = os.environ['DATABASE_URL']

# Single module-level connection/cursor shared by all request handlers.
# NOTE(review): sharing one psycopg2 cursor across threads is not safe —
# confirm the deployment runs single-threaded.
conn = psycopg2.connect(DATABASE_URL, sslmode='require')
cursor = conn.cursor()
db = model.Database(cursor)


@app.route('/news', methods=['GET', 'POST'])
def news():
    if request.method == 'GET':
        amount = int(request.args.get('amount'))
        category = int(request.args.get('category'))

        return jsonify(db.many_news(amount, category))
    elif request.method == 'POST':
        content = request.get_json(cache=False)
        db.add_news(
            content['title'],
            get_image(content['content']),
            content['category'],
            content['content'],