def test_find_node_correlations_A():
    """
    `find_node_correlations`
    Feature A: correctly terminating if insufficient attractors found.
    """
    # Test for {no attractors found, one attractor found, two attractors found}.
    aggregated_attractors_1 = dict()
    aggregated_attractors_2 = {
        1: construct_aggregated_attractor(attractor_states_1, 10, 1, 0)
    }
    aggregated_attractors_3 = {
        1: construct_aggregated_attractor(attractor_states_1, 1, 1, 0),
        8: construct_aggregated_attractor(attractor_states_2, 1, 1, 0)
    }
    cases = (aggregated_attractors_1, aggregated_attractors_2,
             aggregated_attractors_3)
    for aggregated_attractors in cases:
        # Fresh in-memory attractor DB for each case.
        db_conn = ZODB.connection(None)
        init_attractor_db_structure(db_conn)
        for key, attractor in aggregated_attractors.items():
            db_conn.root.aggregated_attractors[key] = attractor
            db_conn.root.n_aggregated_attractors.change(1)
            db_conn.root.total_frequency.change(attractor.frequency)

        node_correlations = find_node_correlations(db_conn, None, True)

        test_description = generate_test_description(locals(),
                                                     'aggregated_attractors')
        # With fewer than the required attractors the function must return None.
        assert node_correlations is None, test_description
示例#2
0
 def setUp(self):
     """Create a fresh Site in an in-memory ZODB and commit it."""
     from ..site import Site
     self.site = Site("Test")
     # Snapshot the site's generation counter for later comparisons in tests.
     self.generation = self.site.generation
     self.conn = ZODB.connection(None)
     self.conn.root.site = self.site
     self.conn.transaction_manager.commit()
示例#3
0
File: db.py  Project: cynddl/katalog
def get_root():
    """
    Return the root object of the current database.
    """
    # Resolve the per-user application directory, then open the data.fs
    # file stored inside it.
    app_dir = click.get_app_dir('katalog', force_posix=True)
    return ZODB.connection(app_dir + '/data.fs').root()
示例#4
0
def test_simulate_master_A():
    """
    `simulate_master`
    Feature A: performing simulations irrespectively of performance tuning.
    """
    predecessor_node_lists, truth_tables = build_predecessor_nodes_lists_and_truth_tables(
        UPDATE_RULES_B)
    initial_state = [False, False, True, False, False, False]
    fixed_nodes = dict()
    perturbed_nodes_by_t = dict()
    max_t = 101
    initial_state_variations = []
    fixed_nodes_variations = []
    # One variation: node 0 may be perturbed to True at t=40.
    perturbed_nodes_by_t_variations = [(40, 0, NodeStateRange.MAYBE_TRUE)]
    n_simulation_problems = count_simulation_problems(
        initial_state_variations, fixed_nodes_variations,
        perturbed_nodes_by_t_variations)
    # Test for {single batch per process, multiple batches per process}.
    n_simulation_problem_batches_per_process_1 = 1
    n_simulation_problem_batches_per_process_2 = 5

    # Unperturbed trajectory: initial state, then a 4-state cycle repeated
    # 25 times, then one more state (max_t = 101 steps total).
    expected_simulation_states_1 = \
        [initial_state] + 25 * [[True, False, False, True, True, False],
                                [True, True, False, False, True, True],
                                [False, True, False, False, False, True],
                                [False, False, True, False, False, False]] + \
        [[True, False, False, True, True, False]]
    expected_simulation_1 = Simulation(expected_simulation_states_1, dict(),
                                       dict())
    # Perturbed trajectory: same prefix up to t=40, then the perturbation
    # {40: {0: True}} sends the system into a fixed point.
    expected_simulation_states_2 = \
        expected_simulation_states_1[:40] + [[True, False, True, False, False, False],
                                             [True, True, False, True, False, False]] + \
        60 * [[True, True, False, False, False, False]]
    expected_simulation_2 = Simulation(expected_simulation_states_2, dict(),
                                       {40: {
                                           0: True
                                       }})
    expected_simulations = [expected_simulation_1, expected_simulation_2]

    # Both batching configurations must produce identical DB contents.
    for n_simulation_problem_batches_per_process in [
            n_simulation_problem_batches_per_process_1,
            n_simulation_problem_batches_per_process_2
    ]:
        # Fresh in-memory simulation DB per configuration.
        db_conn = ZODB.connection(None)
        init_simulation_db_structure(db_conn)
        simulate_master(MPICommWrapper(),
                        n_simulation_problem_batches_per_process,
                        (initial_state, fixed_nodes, perturbed_nodes_by_t),
                        (initial_state_variations, fixed_nodes_variations,
                         perturbed_nodes_by_t_variations),
                        predecessor_node_lists, truth_tables, max_t,
                        n_simulation_problems, db_conn, None)

        test_description = generate_test_description(
            locals(), 'n_simulation_problem_batches_per_process')
        assert list(db_conn.root.simulations.values()
                    ) == expected_simulations, test_description
        assert db_conn.root.n_simulations() == len(
            expected_simulations), test_description
 def test_prefetch_optional(self):
     """prefetch() accepts OIDs, iterables of OIDs, persistent objects,
     and any mixture of those, without raising on a storage that does
     not implement prefetching."""
     conn = ZODB.connection(None)
     conn.prefetch(z64)
     conn.prefetch([z64])
     conn.prefetch(conn.root())
     conn.prefetch(z64, [z64])
     conn.prefetch(z64, [z64], conn.root())
     conn.close()
示例#6
0
 def setUp(self):
     """Create an in-memory site with one board and remember the board's
     generation counter for change detection in the tests."""
     from ..site import Site
     self.conn = ZODB.connection(None)
     self.conn.root.site = self.site = Site('Test site')
     self.site.add_board('dev', 'Development', 'Development projects')
     self.board = self.site.boards['dev']
     # Snapshot taken before any test-specific mutations.
     self.board_generation = self.board.generation
     self.conn.transaction_manager.commit()
 def test_prefetch_optional_imvcc(self):
     """Same prefetch() argument-shape checks as test_prefetch_optional,
     but against an MVCC mapping storage."""
     conn = ZODB.connection(MVCCMappingStorage())
     conn.prefetch(z64)
     conn.prefetch([z64])
     conn.prefetch(conn.root())
     conn.prefetch(z64, [z64])
     conn.prefetch(z64, [z64], conn.root())
     conn.close()
示例#8
0
def test_attract_master_A():
    """
    `attract_master`
    Feature A: finding attractors irrespectively of performance tuning.
    """
    predecessor_node_lists, truth_tables = build_predecessor_nodes_lists_and_truth_tables(
        UPDATE_RULES_A)
    initial_state = [False, False, False, False, False]
    max_t = inf
    max_attractor_l = inf
    initial_state_variations = [0, 1, 2, 3, 4]
    fixed_nodes_variations = []
    perturbed_nodes_by_t_variations = []
    fixed_nodes = {2: True}
    perturbed_nodes_by_t = {5: {2: True}, 1000: {2: False}}
    n_simulation_problems = count_simulation_problems(
        initial_state_variations, fixed_nodes_variations,
        perturbed_nodes_by_t_variations)
    # Test for {single batch per process, multiple batches per process}.
    n_simulation_problem_batches_per_process_1 = 1
    n_simulation_problem_batches_per_process_2 = 5
    # Test for {not storing all states, storing all states}.
    storing_all_states_1 = False
    storing_all_states_2 = True
    # Test for {not packing DB, packing DB}.
    packing_db_1 = False
    packing_db_2 = True

    # Every simulation problem is expected to converge on this one attractor.
    expected_attractors = \
        {(-32, 23): construct_aggregated_attractor([[True, True, True, False, True]], 32, 1005, 0)}
    expected_total_frequency = sum(
        attractor.frequency for attractor in expected_attractors.values())

    # All combinations of the tuning knobs must yield identical DB contents.
    for n_simulation_problem_batches_per_process, storing_all_states, packing_db in product(
        [
            n_simulation_problem_batches_per_process_1,
            n_simulation_problem_batches_per_process_2
        ], [storing_all_states_1, storing_all_states_2],
        [packing_db_1, packing_db_2]):
        # Fresh in-memory attractor DB per combination.
        db_conn = ZODB.connection(None)
        init_attractor_db_structure(db_conn)
        attract_master(MPICommWrapper(),
                       n_simulation_problem_batches_per_process,
                       (initial_state, fixed_nodes, perturbed_nodes_by_t),
                       (initial_state_variations, [], []),
                       predecessor_node_lists, truth_tables, max_t,
                       max_attractor_l, n_simulation_problems,
                       storing_all_states, db_conn, packing_db, None)

        test_description = generate_test_description(
            locals(), 'n_simulation_problem_batches_per_process',
            'storing_all_states')
        assert dict(db_conn.root.aggregated_attractors.items()) == expected_attractors, \
            test_description
        assert db_conn.root.n_aggregated_attractors() == len(
            expected_attractors), test_description
        assert db_conn.root.total_frequency(
        ) == expected_total_frequency, test_description
示例#9
0
 def opendb(self, contName=None):
     """ Open the database if not yet,
     return the required container.

     :param contName: name of the container to return; defaults to
         ``self.contName`` when omitted.
     :raises ValueError: if no container name was supplied either way.
     """
     # Lazily open the connection on first use.
     if self.conn is None:
         self.conn = ZODB.connection(self.db_path)
     if contName is None:
         contName = self.contName
     if contName is None:
         # BUG FIX: raising a plain string ("string exceptions") is invalid
         # in Python 3 and produced a TypeError; raise a real exception.
         raise ValueError("must specify a container name")
     return self.getContainer(self.conn.root, contName)
示例#10
0
 def opendb(self, contName=None):
     """ Open the database if not yet,
     return the required container.

     :param contName: name of the container to return; defaults to
         ``self.contName`` when omitted.
     :raises ValueError: if no container name was supplied either way.
     """
     # Lazily open the connection on first use.
     if self.conn is None:
         self.conn = ZODB.connection(self.db_path)
     if contName is None:
         contName = self.contName
     if contName is None:
         # BUG FIX: raising a plain string ("string exceptions") is invalid
         # in Python 3 and produced a TypeError; raise a real exception.
         raise ValueError("must specify a container name")
     return self.getContainer(self.conn.root, contName)
示例#11
0
def test_target_master_B():
    """
    `target_master`
    Feature B: finding no more than requested number of simulations
    reaching target state.
    """
    predecessor_node_lists, truth_tables = build_predecessor_nodes_lists_and_truth_tables(
        UPDATE_RULES_B)
    initial_state = [False, False, False, False, False, False]
    # Target substate spans all six nodes.
    substate_node_set = {0, 1, 2, 3, 4, 5}
    _encode_state, _ = configure_encode_and_simulate(
        substate_node_set=substate_node_set)
    _, target_substate_code = _encode_state(
        [True, True, False, False, False, True])
    max_t = inf
    # Stop after the first simulation that reaches the target substate.
    n_simulations_to_reach_target_substate = 1
    initial_state_variations = [0, 1, 2, 3, 4, 5]
    fixed_nodes_variations = []
    perturbed_nodes_by_t_variations = []
    fixed_nodes = {0: True, 1: True, 2: False, 3: False, 4: False, 5: True}
    perturbed_nodes_by_t = dict()
    n_simulation_problems = count_simulation_problems(
        initial_state_variations, fixed_nodes_variations,
        perturbed_nodes_by_t_variations)
    # Test for {single batch per process, multiple batches per process}.
    n_simulation_problem_batches_per_process_1 = 1
    n_simulation_problem_batches_per_process_2 = 5

    # Exactly one stored simulation: the target is hit after one step.
    expected_simulations = [
        Simulation([initial_state] + [[True, True, False, False, False, True]],
                   fixed_nodes, perturbed_nodes_by_t)
    ]

    for n_simulation_problem_batches_per_process in [
            n_simulation_problem_batches_per_process_1,
            n_simulation_problem_batches_per_process_2
    ]:
        # Fresh in-memory simulation DB per batching configuration.
        db_conn = ZODB.connection(None)
        init_simulation_db_structure(db_conn)
        target_master(MPICommWrapper(),
                      n_simulation_problem_batches_per_process,
                      (initial_state, fixed_nodes, perturbed_nodes_by_t),
                      (initial_state_variations, fixed_nodes_variations,
                       perturbed_nodes_by_t_variations), target_substate_code,
                      substate_node_set, predecessor_node_lists, truth_tables,
                      n_simulations_to_reach_target_substate, max_t,
                      n_simulation_problems, db_conn, None)

        test_description = generate_test_description(
            locals(), 'n_simulation_problem_batches_per_process')
        assert list(db_conn.root.simulations.values()
                    ) == expected_simulations, test_description
        assert db_conn.root.n_simulations() == len(
            expected_simulations), test_description
示例#12
0
    def opendb(self):
        """ self.db is a dictionary-like object, the initial db
        will be empty, thus this statement: not self.db,
        will always yield a True, which cause the process try
        to open the DB again even if it did open it before, and
        cause an exception (locking problem).

        The correct way is compare it directly with None object.
        """
        # Open the database at most once; cache the 'main' container.
        if self.db is None:
            connection = ZODB.connection(self.db_path)
            self.db    = getContainer(connection.root, 'main')
示例#13
0
    def opendb(self):
        """ self.db is a dictionary-like object, the initial db
        will be empty, thus this statement: not self.db,
        will always yield a True, which cause the process try
        to open the DB again even if it did open it before, and
        cause an exception (locking problem).

        The correct way is compare it directly with None object.
        """
        # `is None` (not truthiness) guards against reopening an empty,
        # already-open container — see the docstring above.
        if self.db is None:
            connection = ZODB.connection(self.db_path)
            self.db = getContainer(connection.root, 'main')
示例#14
0
File: _db.py  Project: stjordanis/db-1
def connection(dsn, **kw):
    """Create a newt :py:class:`newt.db.Connection`.

    Keyword options can be used to provide either `ZODB.DB
    <http://www.zodb.org/en/latest/reference/zodb.html#databases>`_
    options or `RelStorage
    <http://relstorage.readthedocs.io/en/latest/relstorage-options.html>`_
    options.

    :param dsn: data-source name passed through to the storage factory.
    """
    # Split the mixed keyword options into DB-level vs. storage-level ones.
    db_options, storage_options = _split_options(**kw)
    return Connection(
        ZODB.connection(storage(dsn, **storage_options), **db_options))
def test_find_node_correlations_B():
    """
    `find_node_correlations`
    Feature B: calculating node correlations if sufficient attractors found.
    """
    # Test for {two attractors found, more than two attractors found}.
    aggregated_attractors_1 = {
        2: construct_aggregated_attractor(attractor_states_1, 1, 1, 0),
        3: construct_aggregated_attractor(attractor_states_2, 2, 2, 1)
    }
    aggregated_attractors_2 = {
        1: construct_aggregated_attractor(attractor_states_1, 10, 1, 0),
        8: construct_aggregated_attractor(attractor_states_2, 2, 2, 1),
        18: construct_aggregated_attractor(attractor_states_3, 20, 5, 4)
    }

    # Expected correlation matrices; NaN rows/columns correspond to a node
    # with no variation across the attractors.
    expected_Rhos = [
        np.array([[1, -1, np.nan, -1], [-1, 1, np.nan, 1],
                  [np.nan, np.nan, np.nan, np.nan], [-1, 1, np.nan, 1]]),
        np.array([[1, -0.77777778, np.nan, -0.49236596],
                  [-0.77777778, 1, np.nan, -0.16412199],
                  [np.nan, np.nan, np.nan, np.nan],
                  [-0.49236596, -0.16412199, np.nan, 1]])
    ]
    # Matching p-value matrices for the correlations above.
    expected_Ps = [
        np.array([[0, 0, np.nan, 0], [0, 0, np.nan, 0],
                  [np.nan, np.nan, np.nan, np.nan], [0, 0, np.nan, 0]]),
        np.array([[0, 1.62331867e-07, np.nan, 4.20171535e-03],
                  [1.62331867e-07, 0, np.nan, 3.69407243e-01],
                  [np.nan, np.nan, np.nan, np.nan],
                  [4.20171535e-03, 3.69407243e-01, np.nan, 0]])
    ]
    for aggregated_attractors, (expected_Rho, expected_P) in zip(
        [aggregated_attractors_1, aggregated_attractors_2],
            zip(expected_Rhos, expected_Ps)):
        # Fresh in-memory attractor DB per case.
        db_conn = ZODB.connection(None)
        init_attractor_db_structure(db_conn)
        for key, aggregated_attractor in aggregated_attractors.items():
            db_conn.root.aggregated_attractors[key] = aggregated_attractor
            db_conn.root.n_aggregated_attractors.change(1)
            db_conn.root.total_frequency.change(aggregated_attractor.frequency)

        Rho, P = find_node_correlations(db_conn, None, True)

        test_description = generate_test_description(locals(),
                                                     'aggregated_attractors')
        # equal_nan=True: NaN entries must match positionally.
        assert np.allclose(expected_Rho, Rho, equal_nan=True), test_description
        assert np.allclose(expected_P, P, equal_nan=True), test_description
示例#16
0
    def test_used_by_connection(self):
        """A Connection passes TransactionMetaData to the storage at
        tpc_begin, with user/description already UTF-8 encoded."""
        import ZODB
        from ZODB.MappingStorage import MappingStorage

        class Storage(MappingStorage):
            # Capture the transaction object handed to tpc_begin.
            def tpc_begin(self, transaction):
                self.test_transaction = transaction
                return MappingStorage.tpc_begin(self, transaction)

        storage = Storage()
        conn = ZODB.connection(storage)
        with conn.transaction_manager as t:
            t.user = u'user\x80'
            t.description = u'description\x80'
            t.setExtendedInfo('foo', 'FOO')
            conn.root.x = 1

        t = storage.test_transaction
        self.assertEqual(t.__class__, TransactionMetaData)
        # U+0080 encodes to the two UTF-8 bytes b'\xc2\x80'.
        self.assertEqual(t.user, b'user\xc2\x80')
        self.assertEqual(t.description, b'description\xc2\x80')
        self.assertEqual(t.extension, dict(foo='FOO'))
示例#17
0
    def test_used_by_connection(self):
        """Duplicate of the TransactionMetaData/Connection integration test:
        by tpc_begin time the metadata object carries UTF-8 encoded bytes."""
        import ZODB
        from ZODB.MappingStorage import MappingStorage

        class Storage(MappingStorage):
            # Record the transaction object the connection passes in.
            def tpc_begin(self, transaction):
                self.test_transaction = transaction
                return MappingStorage.tpc_begin(self, transaction)

        storage = Storage()
        conn = ZODB.connection(storage)
        with conn.transaction_manager as t:
            t.user = u'user\x80'
            t.description = u'description\x80'
            t.setExtendedInfo('foo', 'FOO')
            conn.root.x = 1

        t = storage.test_transaction
        self.assertEqual(t.__class__, TransactionMetaData)
        # '\x80' becomes the UTF-8 byte pair b'\xc2\x80'.
        self.assertEqual(t.user, b'user\xc2\x80')
        self.assertEqual(t.description, b'description\xc2\x80')
        self.assertEqual(t.extension, dict(foo='FOO'))
示例#18
0
def connection(dsn, blob_dir=None, **kw):
    """Open a database connection for *dsn*, picking a blob directory and
    backend (newt.db for PostgreSQL, plain ZODB otherwise)."""
    td = None
    is_postgres = dsn.startswith('postgresql://')
    if blob_dir is None:
        if is_postgres:
            # Temporary blob dir; the object is kept alive via conn._td below.
            td = tempfile.TemporaryDirectory('blobs')
            blob_dir = td.name
        else:
            blob_dir = dsn + '.blobs'

    if is_postgres:
        import newt.db
        conn = newt.db.connection(dsn,
                                  blob_dir=blob_dir,
                                  shared_blob_dir=False,
                                  keep_history=True,
                                  **kw)
    else:
        import ZODB
        conn = ZODB.connection(dsn, blob_dir=blob_dir)

    conn._td = td  # hold on till garbage
    return conn
示例#19
0
def _open_db(path):
    """Return a cached open connection for *path*, (re)opening on demand."""
    cached = _connections.get(path)
    if cached and _db_is_opened(cached):
        return cached
    # Stale or missing entry: open a fresh connection and cache it.
    fresh = ZODB.connection(path)
    _connections[path] = fresh
    return fresh
示例#20
0
    def __init__(self, *args, **kwargs):
        """Open the ZODB file named by the ``database`` keyword argument
        (default ``data.fs``) and expose its root mapping as ``self.root``."""
        # Pop our own kwarg before delegating the rest to the base class.
        db_filename = kwargs.pop("database", "data.fs")
        super().__init__(*args, **kwargs)

        self.db = ZODB.connection(db_filename)
        self.root = self.db.root()
示例#21
0
import sys

import ZODB
import persistent
import transaction

import config


class _Database(persistent.Persistent):
    """Persistent root record holding the bot's message-id bookkeeping."""

    def __init__(self):
        # PersistentList so in-place mutations are tracked by ZODB.
        self.squad_server_message_ids = persistent.list.PersistentList()
        self.post_server_message_ids = persistent.list.PersistentList()


sys.stdout.write("Starting database...")
# Open (or create) the ZODB file configured for this application.
connection = ZODB.connection(config.DATABASE_FILENAME)
root = connection.root
# Seed the persistent root record on first run only.
if not hasattr(root, "db"):
    database = _Database()
    root.db = database
    transaction.commit()

sys.stdout.write("done\n")
# Module-level handle used by the rest of the application.
db = root.db
示例#22
0
def test_output_attractors_A():
    """
    `output_attractors`
    Feature A: successful output.

    Sweeps the product of layout, fixed-node, problem-count, cap, batching
    and output-format configurations and fails if `output_attractors`
    raises for any of them.
    """
    output_dirpath = "Test output of attractors"
    pdf_page_limit = 1
    is_single_process = True
    packing_db = False
    # Test for {horizontal layout, stacked layout}.
    n_nodes_1 = 2
    n_nodes_2 = 40
    # Test for {no fixed nodes, fixed nodes}.
    fixed_nodes_1 = dict()
    fixed_nodes_2 = {0: False}
    # Test for {attractor found for every simulation problem,
    # attractor not found for some simulation problems}.
    n_simulation_problems_1 = 3
    n_simulation_problems_2 = 4
    # Test for {no time cap, time cap}.
    max_t_1 = inf
    max_t_2 = 10
    # Test for {no attractor length cap, attractor length cap}.
    max_attractor_l_1 = inf
    max_attractor_l_2 = 10
    # Test for {single aggregated attractor batch, multiple
    # aggregated attractor batches}.
    aggregated_attractor_batch_indices_1 = [1, 1]
    aggregated_attractor_batch_indices_2 = [1, 2]
    # Test for {no PDF format, PDF format}.
    to_pdf_1 = False
    to_pdf_2 = True
    # Test for {no image formats, all image formats}.
    image_formats_and_dpis_1 = []
    image_formats_and_dpis_2 = [('svg', None), ('png', 300), ('tiff', 150)]
    # Test for {no CSV format, CSV format}.
    to_csv_1 = False
    to_csv_2 = True

    for n_nodes, fixed_nodes, n_simulation_problems, max_t, max_attractor_l, \
        aggregated_attractor_batch_indices in product(
        [n_nodes_1, n_nodes_2], [fixed_nodes_1, fixed_nodes_2],
        [n_simulation_problems_1, n_simulation_problems_2], [max_t_1, max_t_2],
        [max_attractor_l_1, max_attractor_l_2],
        [aggregated_attractor_batch_indices_1, aggregated_attractor_batch_indices_2]):

        # Skip the inconsistent combination: "attractor not found for some
        # problems" requires at least one cap to be in effect.
        if n_simulation_problems > 3 and max_t == inf and max_attractor_l == inf:
            continue

        # Alternate plain/LaTeX-math node names to exercise both label styles.
        node_names = [('${}$' if i % 2 else '{}').format('node{}'.format(i))
                      for i in range(n_nodes)]
        # Fill in attractor database.
        db_conn = ZODB.connection(None)
        init_attractor_db_structure(db_conn)
        aggregated_attractor_1 = construct_aggregated_attractor(
            [[False] * n_nodes], 1, 1, 0)
        aggregated_attractor_2 = construct_aggregated_attractor(
            [[True] * n_nodes] * 2, 2, 1.5, .5)

        for i, (aggregated_attractor_batch_index,
                aggregated_attractor) in enumerate(
                    zip(aggregated_attractor_batch_indices,
                        [aggregated_attractor_1, aggregated_attractor_2])):
            aggregated_attractor_key = i + 1
            aggregated_attractors = {
                aggregated_attractor_key: aggregated_attractor
            }
            write_aggregated_attractors_to_db(
                packing_db, db_conn, aggregated_attractors,
                aggregated_attractor_batch_index)

        for to_pdf, image_formats_and_dpis, to_csv in product(
            [to_pdf_1, to_pdf_2],
            [image_formats_and_dpis_1, image_formats_and_dpis_2],
            [to_csv_1, to_csv_2]):

            # At least one output format must be requested.
            if not to_pdf and not image_formats_and_dpis and not to_csv:
                continue

            os.makedirs(output_dirpath, exist_ok=True)
            test_description = generate_test_description(
                locals(), 'n_nodes', 'fixed_nodes', 'n_simulation_problems',
                'max_t', 'max_attractor_l',
                'aggregated_attractor_batch_indices', 'to_pdf',
                'image_formats_and_dpis', 'to_csv')
            try:
                output_attractors(db_conn, fixed_nodes, node_names,
                                  n_simulation_problems, max_attractor_l,
                                  max_t, output_dirpath, is_single_process,
                                  to_pdf, pdf_page_limit,
                                  image_formats_and_dpis, to_csv)
            # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit; catch only Exception.
            except Exception:
                pytest.fail(test_description)
            finally:
                shutil.rmtree(output_dirpath)
示例#23
0
def test_output_simulations_A():
    """
    `output_simulations`
    Feature A: successful output.

    Sweeps layout, fixed-node, perturbation, batching and output-format
    configurations and fails if `output_simulations` raises for any of them.
    """
    output_dirpath = "Test output of simulations"
    pdf_page_limit = 1
    is_single_process = True
    # Test for {horizontal layout, stacked layout}.
    n_nodes_1 = 2
    n_nodes_2 = 40
    # Test for {no fixed nodes, fixed nodes}.
    fixed_nodes_1 = dict()
    fixed_nodes_2 = {0: False}
    # Test for {no perturbations, perturbations}.
    perturbed_nodes_by_t_1 = dict()
    perturbed_nodes_by_t_2 = {1: {0: True}, 2: {0: False}}
    # Test for {single simulation batch, multiple simulation batches}.
    simulation_cutoff_index_for_batches_1 = 1
    simulation_cutoff_index_for_batches_2 = 2
    # Test for {no PDF format, PDF format}.
    to_pdf_1 = False
    to_pdf_2 = True
    # Test for {no image formats, all image formats}.
    image_formats_and_dpis_1 = []
    image_formats_and_dpis_2 = [('svg', None), ('png', 300), ('tiff', 150)]
    # Test for {no CSV format, CSV format}.
    to_csv_1 = False
    to_csv_2 = True

    for n_nodes, fixed_nodes, perturbed_nodes_by_t, simulation_cutoff_index_for_batches in product(
        [n_nodes_1, n_nodes_2], [fixed_nodes_1, fixed_nodes_2],
        [perturbed_nodes_by_t_1, perturbed_nodes_by_t_2], [
            simulation_cutoff_index_for_batches_1,
            simulation_cutoff_index_for_batches_2
        ]):

        # Alternate plain/LaTeX-math node names to exercise both label styles.
        node_names = [('${}$' if i % 2 else '{}').format('node{}'.format(i))
                      for i in range(n_nodes)]
        # Fill in simulation database.
        db_conn = ZODB.connection(None)
        init_simulation_db_structure(db_conn)
        simulation_1 = Simulation([[False] * n_nodes], fixed_nodes,
                                  perturbed_nodes_by_t)
        simulation_2 = Simulation([[True] * n_nodes] * 2, fixed_nodes,
                                  perturbed_nodes_by_t)
        simulations = [simulation_1, simulation_2]
        simulation_batch_1 = simulations[:simulation_cutoff_index_for_batches]
        simulation_batch_2 = simulations[simulation_cutoff_index_for_batches:]

        for simulation_batch_index, simulation_batch in enumerate(
            [simulation_batch_1, simulation_batch_2]):
            write_simulations_to_db(db_conn, simulation_batch,
                                    simulation_batch_index)

        for to_pdf, image_formats_and_dpis, to_csv in product(
            [to_pdf_1, to_pdf_2],
            [image_formats_and_dpis_1, image_formats_and_dpis_2],
            [to_csv_1, to_csv_2]):

            # At least one output format must be requested.
            if not to_pdf and not image_formats_and_dpis and not to_csv:
                continue

            os.makedirs(output_dirpath, exist_ok=True)
            test_description = generate_test_description(
                locals(), 'n_nodes', 'fixed_nodes', 'perturbed_nodes_by_t',
                'simulation_cutoff_index_for_batches', 'to_pdf',
                'image_formats_and_dpis', 'to_csv')
            try:
                output_simulations(db_conn, node_names, output_dirpath,
                                   is_single_process, to_pdf, pdf_page_limit,
                                   image_formats_and_dpis, to_csv)
            # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit; catch only Exception.
            except Exception:
                pytest.fail(test_description)
            finally:
                shutil.rmtree(output_dirpath)
# Beware the GitHub rate limit: https://developer.github.com/v3/#rate-limiting
# "For requests using Basic Authentication or OAuth, you can make up to 5,000
# requests per hour."
#
# In the code below, we track the number of requests made and the reset time.
# When we hit the limit, we pause until the reset time and then continue.

github = github3.login(github_login, github_password)
calls_left = api_calls_left(github)

msg('Started at ', datetime.now())
started = timer()

msg('Opening database "{}"'.format(dbfile))
dbconnection = ZODB.connection(dbfile)
dbroot = dbconnection.root()

# Create the top-level mapping on first run.
# (Fixed the non-idiomatic `not 'github' in ...` to `'github' not in ...`.)
if 'github' not in dbroot.keys():
    msg('Empty database -- creating root object')
    dbroot['github'] = BTree()
else:
    print('"{}" contains {} entries'.format(dbfile, len(dbroot['github'])))

db = dbroot['github']

msg('Initial GitHub API calls remaining: ', calls_left)
msg('Generating list of all repositories:')

# If we're restarting this process, we will already have entries in the db.
示例#25
0
from BTrees.OOBTree import BTree
import transaction


class Student:
    """One student record, mirroring a CSV row: id, name, class, score."""

    def __init__(self, id, name, class_name, score):
        # Tuple-unpack the constructor arguments onto the instance.
        self.id, self.name, self.class_name, self.score = (
            id, name, class_name, score)


csv_filename = 'data.csv'
db_filename = 'data/data.fs'

# Open the ZODB file and install a fresh BTree of students at the root.
conn = ZODB.connection(db_filename)
students = BTree()
root = conn.root

root.students = students

# NOTE(review): file is decoded as GBK — presumably a Chinese-locale CSV
# export; verify against the data source.
with open(csv_filename, mode="r", encoding="GBK") as file:
    students.clear()
    reader = csv.reader(file)
    for row in reader:
        # Expected column order: id, name, class name, score.
        id = row[0]
        name = row[1]
        class_name = row[2]
        score = float(row[3])
        if id in students.keys():
            print(f"载入失败:学号{id}已存在!")
示例#26
0
import ZODB, ZODB.FileStorage
import BTrees.OOBTree
import persistent
from dataclasses import dataclass, field
import datetime

# Open (or create) the ZODB file and keep a module-level root handle.
connection = ZODB.connection('mydata.fs')
root = connection.root

@dataclass
class Person(persistent.Persistent):
    """A person record stored in ZODB (dataclass + persistent.Persistent)."""
    name: str
    birthday: datetime.date
    # NOTE(review): plain-list field — in-place mutations of a regular list
    # are not tracked by ZODB's change detection; confirm this is intended.
    friends: list = field(default_factory=list)

# root.people = [Person('A', datetime.date(2000,1,1))]
def create_people():
    """Initialize root.people as a fresh BTree and seed it with one Person."""
    print(root)
    root.people = BTrees.OOBTree.BTree()
    root.people['A'] = Person('A', datetime.date(2000,1,1))


# Inspect the stored data; fails unless create_people() ran and committed.
print(root, root.people)
print(root.people['A'])
# for key, person in root.people.items():
#     print(key,person)
# root.people['A'].name = 'AAA'

# import transaction
# transaction.commit()