Exemplo n.º 1
0
def connection():
    """Open a dedicated test connection with two throw-away schemas.

    Presumably a pytest fixture (decorator not visible in this chunk):
    yields the live connection, then drops both schemas, closes the
    connection, and restores safemode on teardown.
    """
    dj.config['safemode'] = False
    conn = dj.conn(
        host=getenv('TEST_DB_SERVER'),
        user=getenv('TEST_DB_USER'),
        password=getenv('TEST_DB_PASS'),
        reset=True,
    )

    schema1 = dj.Schema('schema1', connection=conn)

    @schema1
    class TableA(dj.Manual):
        definition = """
        id: int
        ---
        name: varchar(30)
        """

    schema2 = dj.Schema('schema2', connection=conn)

    @schema2
    class TableB(dj.Manual):
        definition = """
        id: int
        ---
        number: float
        """

    yield conn

    # Teardown: drop both schemas before closing the shared connection.
    schema1.drop()
    schema2.drop()
    conn.close()
    dj.config['safemode'] = True
Exemplo n.º 2
0
def virtual_module():
    """Build a 'filter' schema populated with fake Student rows and yield it
    as a DataJoint virtual module.

    Presumably a pytest fixture (decorator not visible in this chunk):
    drops the schema, closes the connection, and restores safemode on
    teardown.
    """
    dj.config['safemode'] = False
    connection = dj.conn(host=getenv('TEST_DB_SERVER'),
                         user=getenv('TEST_DB_USER'),
                         password=getenv('TEST_DB_PASS'),
                         reset=True)
    # Fix: bind the schema to the dedicated test connection; previously it
    # silently used the global default connection, unlike sibling fixtures.
    schema = dj.Schema('filter', connection=connection)

    @schema
    class Student(dj.Lookup):
        definition = """
        student_id: int
        ---
        student_name: varchar(50)
        student_ssn: varchar(20)
        student_enroll_date: datetime
        student_balance: float
        student_parking_lot=null : varchar(20)
        student_out_of_state: bool
        """
        # NOTE(review): round(randint(...), 2) is a no-op on an int; if
        # fractional balances were intended, random.uniform() would be
        # needed instead — confirm before changing the generated data.
        contents = [(i, faker.name(), faker.ssn(),
                     faker.date_between_dates(date_start=date(2021, 1, 1),
                                              date_end=date(2021, 1, 31)),
                     round(randint(1000, 3000), 2),
                     choice([None, 'LotA', 'LotB', 'LotC']),
                     bool(getrandbits(1)))
                    for i in range(100)]

    yield dj.VirtualModule('filter', 'filter')

    schema.drop()
    connection.close()
    dj.config['safemode'] = True
Exemplo n.º 3
0
def migrate_dj011_external_blob_storage_to_dj012(migration_schema, store):
    """
    Utility function to migrate external blob data from 0.11 to 0.12.

    :param migration_schema: string of target schema to be migrated
    :param store: string of target dj.config['store'] to be migrated
    :raises ValueError: if either argument is not a string
    """
    if not isinstance(migration_schema, str):
        raise ValueError(
            'Expected type {} for migration_schema, not {}.'.format(
                str, type(migration_schema)))
    # Validate `store` the same way for consistency (it is interpolated
    # into the migration just like the schema name).
    if not isinstance(store, str):
        raise ValueError(
            'Expected type {} for store, not {}.'.format(
                str, type(store)))

    # Require explicit confirmation before touching external data.
    # (Removed a dead `do_migration = False` that was immediately overwritten.)
    do_migration = user_choice("""
Warning: Ensure the following are completed before proceeding.
- Appropriate backups have been taken,
- Any existing DJ 0.11.X connections are suspended, and
- External config has been updated to new dj.config['stores'] structure.
Proceed?
            """,
                               default='no') == 'yes'
    if do_migration:
        _migrate_dj011_blob(dj.Schema(migration_schema), store)
        print('Migration completed for schema: {}, store: {}.'.format(
            migration_schema, store))
        return
    print('No migration performed.')
Exemplo n.º 4
0
    def create_administrator(self):
        """Register the administrator, (re)create its MySQL account with
        full privileges, and return a schema named after the user.

        Reads ``administrator_info``, ``standard_password``, ``connection``
        and ``user_name`` through ``self[...]``.
        """
        # Record the administrator row in the user table.
        self.user_table.insert1(self['administrator_info'])

        password = self['standard_password']
        conn = self['connection']
        host_pattern = '%'  # MySQL host pattern: allow any host
        username = self['administrator_info'][self['user_name']]

        # Flush so the privilege changes below start from a clean state.
        conn.query("FLUSH PRIVILEGES;")

        # Recreate the account from scratch.
        conn.query("DROP USER IF EXISTS %s@%s;", (username, host_pattern))
        conn.query("CREATE USER %s@%s IDENTIFIED BY %s;",
                   (username, host_pattern, password))

        # Schema named after the user.
        schema = dj.Schema(username)

        # Grant the administrator full rights on everything.
        grants = {'*.*': "ALL PRIVILEGES"}
        for dbtable, privilege in grants.items():
            conn.query(f"GRANT {privilege} ON {dbtable} to %s@%s;",
                       (username, host_pattern))

        conn.query("FLUSH PRIVILEGES;")

        return schema
Exemplo n.º 5
0
def test_overlapping_name():
    """A part table may reuse the name of another table in the same schema
    (here ``Unit`` exists both standalone and as a part of ``Cell``)."""
    test_schema = dj.Schema(PREFIX + '_overlapping_schema', connection=dj.conn(**CONN_INFO))

    @test_schema
    class Unit(dj.Manual):
        definition = """
        id:  int     # simple id
        """

    # hack to update the locals dictionary so DataJoint can resolve `Unit`
    # when declaring the part table below
    locals()

    @test_schema
    class Cell(dj.Manual):
        definition = """
        type:  varchar(32)    # type of cell
        """

        class Unit(dj.Part):
            definition = """
            -> master
            -> Unit
            """

    test_schema.drop()
    def test_load_dependencies(self):
        """Populate a three-level pipeline (Lookup -> Imported -> Computed)
        to exercise dependency loading during ``populate()``."""
        schema = dj.Schema(f"{PREFIX}_load_dependencies_populate")

        @schema
        class ImageSource(dj.Lookup):
            definition = """
            image_source_id: int
            """
            contents = [(0, )]

        @schema
        class Image(dj.Imported):
            definition = """
            -> ImageSource
            ---
            image_data: longblob
            """

            def make(self, key):
                # Placeholder payload; only the dependency chain matters here.
                self.insert1(dict(key, image_data=dict()))

        Image.populate()

        @schema
        class Crop(dj.Computed):
            definition = """
            -> Image
            ---
            crop_image: longblob
            """

            def make(self, key):
                self.insert1(dict(key, crop_image=dict()))

        Crop.populate()
def test_drop_database():
    """Dropping a schema removes it; dropping it again is a silent no-op."""
    conn = dj.conn(reset=True, **CONN_INFO)
    schema = dj.Schema(PREFIX + '_drop_test', connection=conn)
    assert_true(schema.exists)
    schema.drop()
    assert_false(schema.exists)
    schema.drop()  # second drop must do nothing
Exemplo n.º 8
0
def grantuser(username, connection='%', password=None, adduser=False):
    """Add a user to the database. Requires admin/granting access.
    It also adds a user-specific schema

    :param username: MySQL account name; also used as the schema name
    :param connection: MySQL host pattern the account may connect from
        ('%' = any host)
    :param password: password for a newly created account; defaults to
        config['standard_password']
    :param adduser: when True, drop and re-create the MySQL account
    :return: dj.Schema named after the user
    """

    if password is None:
        password = config['standard_password']

    # establish connection
    conn = config['connection']

    # for safety flush all privileges
    conn.query("FLUSH PRIVILEGES;")

    # create user — only when explicitly requested; otherwise the account
    # is assumed to already exist
    if adduser:
        conn.query("DROP USER IF EXISTS %s@%s;", (username, connection))
        conn.query("CREATE USER %s@%s IDENTIFIED BY %s;",
                   (username, connection, password))

    # create user-specific schema
    schema = dj.Schema(username)

    # broad rights everywhere plus full control of the user's own schema
    privileges = {
        '*.*': "DELETE, SELECT, INSERT, UPDATE, REFERENCES, CREATE",
        f'{username}.*': "ALL PRIVILEGES"
    }

    grantprivileges(username, conn, privileges, connection)

    return schema
Exemplo n.º 9
0
 def test_insert_failure(self):
     """Insert through classes spawned on an unprivileged connection.

     NOTE(review): the name suggests the final insert is expected to fail,
     but the decorator/assertion enforcing that is not visible in this
     chunk — confirm against the full test module.
     """
     unprivileged = dj.Schema(schema.schema.database,
                              namespace,
                              connection=self.connection)
     unprivileged.spawn_missing_classes()
     assert_true(
         issubclass(Language, dj.Lookup)
         and len(Language()) == len(schema.Language()),
         'failed to spawn missing classes')
     Language().insert1(('Socrates', 'Greek'))
Exemplo n.º 10
0
    def test_failure_to_create_table(self):
        """Declare and insert into a table on an unprivileged connection.

        NOTE(review): the name suggests table creation is expected to fail,
        but the decorator/assertion enforcing that is not visible in this
        chunk — confirm against the full test module.
        """
        unprivileged = dj.Schema(schema.schema.database,
                                 namespace,
                                 connection=self.connection)

        @unprivileged
        class Try(dj.Manual):
            definition = """  # should not matter really
            id : int
            ---
            value : float
            """

        Try().insert1((1, 1.5))
Exemplo n.º 11
0
    def list_tables(jwt_payload: dict, schema_name: str):
        """
        List all tables and their type given a schema
        :param jwt_payload: Dictionary containing databaseAddress, username and password
            strings
        :type jwt_payload: dict
        :param schema_name: Name of schema to list all tables from
        :type schema_name: str
        :return: Contains a key for a each table type and it corressponding table names
        :rtype: dict
        :raises UnsupportedTableType: if a table's tier is not recognized
        """
        DJConnector.set_datajoint_config(jwt_payload)

        # Get list of tables names
        tables_name = dj.Schema(schema_name, create_schema=False).list_tables()

        # Dict to store list of table name for each type
        tables_dict_list = dict(manual_tables=[],
                                lookup_tables=[],
                                computed_tables=[],
                                imported_tables=[],
                                part_tables=[])

        # Simple tiers map directly onto a result bucket; Part tables are
        # handled separately because their stored names encode the master.
        bucket_by_tier = {'Manual': 'manual_tables',
                          'Lookup': 'lookup_tables',
                          'Computed': 'computed_tables',
                          'Imported': 'imported_tables'}

        for table_name in tables_name:
            table_type = dj.diagram._get_tier('`' + schema_name + '`.`' +
                                              table_name + '`').__name__
            if table_type in bucket_by_tier:
                tables_dict_list[bucket_by_tier[table_type]].append(
                    dj.utils.to_camel_case(table_name))
            elif table_type == 'Part':
                # '__' separates master and part in the stored table name.
                table_name_parts = table_name.split('__')
                tables_dict_list['part_tables'].append(
                    to_camel_case(table_name_parts[-2]) + '.' +
                    to_camel_case(table_name_parts[-1]))
            else:
                raise UnsupportedTableType(table_name +
                                           ' is of unknown table type')

        return tables_dict_list
    def test_convert():
        """Migrate the 'djtest_blob_migrate' schema to DJ 0.12 stores and
        verify that fetch and insert both work afterwards."""
        # Configure stores
        default_store = 'external'  # naming the unnamed external store
        dj.config['stores'] = {
            default_store:
            dict(protocol='s3',
                 endpoint=S3_CONN_INFO['endpoint'],
                 bucket='migrate-test',
                 location='store',
                 access_key=S3_CONN_INFO['access_key'],
                 secret_key=S3_CONN_INFO['secret_key']),
            'shared':
            dict(protocol='s3',
                 endpoint=S3_CONN_INFO['endpoint'],
                 bucket='migrate-test',
                 location='maps',
                 access_key=S3_CONN_INFO['access_key'],
                 secret_key=S3_CONN_INFO['secret_key']),
            'local':
            dict(protocol='file',
                 location=str(
                     Path(os.path.expanduser('~'), 'temp', 'migrate-test')))
        }
        dj.config['cache'] = str(
            Path(os.path.expanduser('~'), 'temp', 'dj-cache'))

        dj.config['database.password'] = CONN_INFO['password']
        dj.config['database.user'] = CONN_INFO['user']
        dj.config['database.host'] = CONN_INFO['host']
        schema = dj.Schema('djtest_blob_migrate')

        # Test if migration throws unexpected exceptions
        _migrate_dj011_blob(schema, default_store)

        # Test Fetch
        test_mod = dj.create_virtual_module('test_mod', 'djtest_blob_migrate')
        r1 = test_mod.A.fetch('blob_share', order_by='id')
        assert_equal(r1[1][1], 2)

        # Test Insert
        test_mod.A.insert1({
            'id': 3,
            'blob_external': [9, 8, 7, 6],
            'blob_share': {
                'number': 5
            }
        })
        r2 = (test_mod.A & 'id=3').fetch1()
        assert_equal(r2['blob_share']['number'], 5)
Exemplo n.º 13
0
    def _list_tables(jwt_payload: dict, schema_name: str) -> dict:
        """
        List all tables and their type given a schema.

        :param jwt_payload: Dictionary containing databaseAddress, username, and password
            strings
        :type jwt_payload: dict
        :param schema_name: Name of schema to list all tables from
        :type schema_name: str
        :return: Contains a key for each table type where values are the respective list of
            table names
        :rtype: dict
        :raises UnsupportedTableType: if a table's tier is not recognized
        """
        _DJConnector._set_datajoint_config(jwt_payload)

        # Get list of tables names
        tables_name = dj.Schema(schema_name, create_schema=False).list_tables()
        # Dict to store list of table name for each type
        tables_dict_list = dict(manual=[],
                                lookup=[],
                                computed=[],
                                imported=[],
                                part=[])
        # Simple tiers map directly onto a result bucket; Part tables are
        # handled separately because their stored names encode the master.
        bucket_by_tier = {"Manual": "manual",
                          "Lookup": "lookup",
                          "Computed": "computed",
                          "Imported": "imported"}
        for table_name in tables_name:
            table_type = dj.diagram._get_tier("`" + schema_name + "`.`" +
                                              table_name + "`").__name__
            if table_type in bucket_by_tier:
                tables_dict_list[bucket_by_tier[table_type]].append(
                    dj.utils.to_camel_case(table_name))
            elif table_type == "Part":
                # "__" separates master and part in the stored table name.
                table_name_parts = table_name.split("__")
                tables_dict_list["part"].append(
                    to_camel_case(table_name_parts[-2]) + "." +
                    to_camel_case(table_name_parts[-1]))
            else:
                raise UnsupportedTableType(table_name +
                                           " is of unknown table type")
        return tables_dict_list
Exemplo n.º 14
0
def test_uppercase_schema():
    """Schemas whose names contain uppercase letters must work.

    Regression test for https://github.com/datajoint/datajoint-python/issues/564
    """
    dj.conn(**CONN_INFO_ROOT, reset=True)

    upper_schema = dj.Schema('Schema_A')

    @upper_schema
    class Subject(dj.Manual):
        definition = """
        name: varchar(32)
        """

    Schema_A = dj.VirtualModule('Schema_A', 'Schema_A')

    lower_schema = dj.Schema('schema_b')

    @lower_schema
    class Recording(dj.Manual):
        definition = """
        -> Schema_A.Subject
        id: smallint
        """

    # Drop the referencing schema before the referenced one.
    lower_schema.drop()
    upper_schema.drop()
Exemplo n.º 15
0
    def test_convert():
        # Configure stores
        default_store = "external"  # naming the unnamed external store
        dj.config["stores"] = {
            default_store: dict(
                protocol="s3",
                endpoint=S3_CONN_INFO["endpoint"],
                bucket=S3_MIGRATE_BUCKET,
                location="store",
                access_key=S3_CONN_INFO["access_key"],
                secret_key=S3_CONN_INFO["secret_key"],
            ),
            "shared": dict(
                protocol="s3",
                endpoint=S3_CONN_INFO["endpoint"],
                bucket=S3_MIGRATE_BUCKET,
                location="maps",
                access_key=S3_CONN_INFO["access_key"],
                secret_key=S3_CONN_INFO["secret_key"],
            ),
            "local": dict(
                protocol="file",
                location=str(Path(os.path.expanduser("~"), "temp", S3_MIGRATE_BUCKET)),
            ),
        }
        dj.config["cache"] = str(Path(os.path.expanduser("~"), "temp", "dj-cache"))

        dj.config["database.password"] = CONN_INFO["password"]
        dj.config["database.user"] = CONN_INFO["user"]
        dj.config["database.host"] = CONN_INFO["host"]
        schema = dj.Schema("djtest_blob_migrate")

        # Test if migration throws unexpected exceptions
        _migrate_dj011_blob(schema, default_store)

        # Test Fetch
        test_mod = dj.create_virtual_module("test_mod", "djtest_blob_migrate")
        r1 = test_mod.A.fetch("blob_share", order_by="id")
        assert_equal(r1[1][1], 2)

        # Test Insert
        test_mod.A.insert1(
            {"id": 3, "blob_external": [9, 8, 7, 6], "blob_share": {"number": 5}}
        )
        r2 = (test_mod.A & "id=3").fetch1()
        assert_equal(r2["blob_share"]["number"], 5)
Exemplo n.º 16
0
    def test_part():
        # Lookup and part with the same name.  See issue #365
        local_schema = dj.Schema(schema.database)

        @local_schema
        class Type(dj.Lookup):
            definition = """
            type :  varchar(255)
            """
            contents = zip(('Type1', 'Type2', 'Type3'))

        @local_schema
        class TypeMaster(dj.Manual):
            definition = """
            master_id : int
            """

            class Type(dj.Part):
                definition = """
Exemplo n.º 17
0
def schemas_simple(connection):
    """Simple test schemas.

    Builds two small interrelated lookup schemas on the given connection,
    yields ``(group1, group2)``, and drops both on teardown.
    """
    group1_simple = dj.Schema(f"{SCHEMA_PREFIX}group1_simple", connection=connection)
    group2_simple = dj.Schema(f"{SCHEMA_PREFIX}group2_simple", connection=connection)

    @group1_simple
    class TableA(dj.Lookup):
        definition = """
        a_id: int
        ---
        a_name: varchar(30)
        """
        contents = [(0, "Raphael"), (1, "Bernie")]

    @group1_simple
    class TableB(dj.Lookup):
        definition = """
        -> TableA
        b_id: int
        ---
        b_number: float
        """
        contents = [(0, 10, 22.12), (0, 11, -1.21), (1, 21, 7.77)]

    @group2_simple
    class DiffTableB(dj.Lookup):
        definition = """
        -> TableA
        bs_id: int
        ---
        bs_number: float
        """
        contents = [(0, -10, -99.99), (0, -11, 287.11)]

    @group1_simple
    class TableC(dj.Lookup):
        definition = """
        -> TableB
        c_id: int
        ---
        c_int: int
        """
        contents = [(0, 10, 100, -8), (0, 11, 200, -9), (0, 11, 300, -7)]

    @group1_simple
    class PlotlyTable(dj.Lookup):
        definition = """
        p_id: int
        ---
        plot: longblob
        """
        # One stored Plotly-style figure: a scatter trace plus a bar trace.
        contents = [
            (
                2,
                dict(
                    data=[
                        dict(
                            x=[1, 2, 3],
                            y=[2, 6, 3],
                            type="scatter",
                            mode="lines+markers",
                            marker=dict(color="red"),
                        ),
                        dict(type="bar", x=[1, 2, 3], y=[2, 5, 3]),
                    ],
                    layout=dict(title="A Fancy Plot"),
                ),
            )
        ]

    yield group1_simple, group2_simple

    # Drop the dependent schema first.
    group2_simple.drop()
    group1_simple.drop()
Exemplo n.º 18
0
"""
Sample schema with realistic tables for testing
"""

import random
import numpy as np
import datajoint as dj
import inspect
from . import PREFIX, CONN_INFO

# Schema shared by all tables declared in this module, on the test connection.
schema = dj.Schema(PREFIX + '_test1', connection=dj.conn(**CONN_INFO))


@schema
class TTest(dj.Lookup):
    """
    Small lookup table: ten rows mapping ``key`` to ``2 * key``.
    """
    definition = """
    key   :   int     # key
    ---
    value   :   int     # value
    """
    contents = [(k, 2 * k) for k in range(10)]


@schema
class TTest2(dj.Manual):
    definition = """
    key   :   int     # key
    ---
Exemplo n.º 19
0
        for k, v in layout.items()
    }
    t = adapted.Layout()
    t.insert1((0, layout))
    result = t.fetch1("layout")
    assert_dict_equal(result, layout)

    t.delete()
    c.delete()

    dj.errors._switch_filepath_types(False)
    dj.errors._switch_adapted_types(False)


# test spawned classes: bind to the adapted schema and inject its table
# classes into this module's namespace for the tests below
local_schema = dj.Schema(adapted.schema_name)
local_schema.spawn_missing_classes()


def test_adapted_spawned():
    dj.errors._switch_adapted_types(True)
    c = Connectivity()  # a spawned class
    graphs = [
        nx.lollipop_graph(4, 2),
        nx.star_graph(5),
        nx.barbell_graph(3, 1),
        nx.cycle_graph(5),
    ]
    c.insert((i, g) for i, g in enumerate(graphs))
    returned_graphs = c.fetch("conn_graph", order_by="connid")
    for g1, g2 in zip(graphs, returned_graphs):
Exemplo n.º 20
0
"""tables for recording schema
"""

import datajoint as dj
from loris.database.schema.base import ManualLookup
from loris.database.attributes import lookupname

# All recording tables in this module live in the 'recordings' schema.
schema = dj.Schema('recordings')


@schema
class RecordingType(ManualLookup, dj.Manual):
    """Lookup of recording types (e.g. TSeries, ZStack)."""
    primary_comment = 'type of recording - e.g. TSeries, ZStack'


@schema
class RecordingSolution(ManualLookup, dj.Manual):
    """Lookup of solution types (e.g. saline, saline + OA)."""
    primary_comment = 'type of solution - e.g. saline, saline + OA'
Exemplo n.º 21
0
import datajoint as dj

# NOTE(review): created without a schema name — presumably named/activated
# elsewhere before tables are declared; confirm against the caller.
schema = dj.Schema()


@schema
class Student(dj.Manual):
    """University student with contact and mailing information."""
    definition = """
    student_id : int unsigned   # university-wide ID number
    ---
    first_name      : varchar(40)
    last_name       : varchar(40)
    sex             : enum('F', 'M', 'U')
    date_of_birth   : date
    home_address    : varchar(120) # mailing street address
    home_city       : varchar(60)  # mailing address
    home_state      : char(2)      # US state acronym: e.g. OH
    home_zip        : char(10)     # zipcode e.g. 93979-4979
    home_phone      : varchar(20)  # e.g. 414.657.6883x0881
    """


@schema
class Department(dj.Manual):
    """Academic department, keyed by its abbreviated name."""
    definition = """
    dept : varchar(6)   # abbreviated department name, e.g. BIOL
    ---
    dept_name    : varchar(200)  # full department name
    dept_address : varchar(200)  # mailing address
    dept_phone   : varchar(20)
    """
Exemplo n.º 22
0
import datajoint as dj
import os
import pandas as pd
import numpy as np
import pyarrow as pa
import pyarrow.csv as csv
import pyarrow.dataset as ds
import pyarrow.parquet as pq
import pyarrow.compute as pc
import getpass
from .utils import create_config

create_config()  # set up the DataJoint config before opening the schema

# All lab tables in this module live in the 'clendenen_lab' schema.
lab = dj.Schema('clendenen_lab')


@lab
class Host(dj.Lookup):
    """Lookup of known lab host machines."""
    definition = """
    host : varchar(20)   # unique user name
    ---
    """
    contents = [
        ['spruce.symlink.pw'],
        ['yew.symlink.pw'],
    ]


@lab
class User(dj.Lookup):
import datajoint as dj
import numpy as np

from . import PREFIX, CONN_INFO
from numpy.testing import assert_array_equal
from nose.tools import assert_true

# Input and output schemas for the blob bypass-serialization round trip.
schema_in = dj.Schema(PREFIX + '_test_bypass_serialization_in',
                      connection=dj.conn(**CONN_INFO))

schema_out = dj.Schema(PREFIX + '_test_blob_bypass_serialization_out',
                       connection=dj.conn(**CONN_INFO))

# Reference blob used to verify the round trip.
test_blob = np.array([1, 2, 3])


@schema_in
class Input(dj.Lookup):
    """Source table preloaded with the reference blob."""
    definition = """
    id:                 int
    ---
    data:               blob
    """
    contents = [(0, test_blob)]


@schema_out
class Output(dj.Manual):
    definition = """
    id:                 int
    ---
Exemplo n.º 24
0
"""some core schema (mostly lookup schema)
"""

import datajoint as dj

from loris.database.table_mixin import ManualLookup
from loris.database.attributes import lookupname


# All core lookup tables in this module live in the 'core' schema.
schema = dj.Schema('core')


@schema
class LookupName(ManualLookup, dj.Manual):
    """Generic lookup of identifiable names (e.g. stimulus, xml_file, array)."""
    primary_comment = 'identifiable name - e.g. stimulus, xml_file, array'


@schema
class ExtensionLookupName(ManualLookup, dj.Manual):
    """Lookup of file-extension/format names (e.g. prairieview, axograph)."""
    primary_comment = 'identifiable name - e.g. prairieview, axograph'


@schema
class DataLookupName(ManualLookup, dj.Manual):
    """Lookup of data-kind names (e.g. stimulus, array, movie)."""
    primary_comment = 'identifiable name - e.g. stimulus, array, movie'


@schema
class FileLookupName(ManualLookup, dj.Manual):
    """Lookup of file-kind names (e.g. xml_file, settings)."""
    primary_comment = 'identifiable name - e.g. xml_file, settings'
Exemplo n.º 25
0
"""
a schema for testing external attributes
"""

import tempfile
import datajoint as dj

from . import PREFIX, CONN_INFO, S3_CONN_INFO
import numpy as np

# Schema for the external-attribute tests, on the shared test connection.
schema = dj.Schema(PREFIX + "_extern", connection=dj.conn(**CONN_INFO))

stores_config = {
    "raw":
    dict(protocol="file", location=tempfile.mkdtemp()),
    "repo":
    dict(stage=tempfile.mkdtemp(),
         protocol="file",
         location=tempfile.mkdtemp()),
    "repo-s3":
    dict(S3_CONN_INFO,
         protocol="s3",
         location="dj/repo",
         stage=tempfile.mkdtemp()),
    "local":
    dict(protocol="file", location=tempfile.mkdtemp(), subfolding=(1, 1)),
    "share":
    dict(S3_CONN_INFO,
         protocol="s3",
         location="dj/store/repo",
         subfolding=(2, 4)),
Exemplo n.º 26
0
def schema(connection):
    """Yield a temporary 'add_types' schema bound to the given connection;
    drop it on teardown."""
    tmp_schema = dj.Schema('add_types', connection=connection)
    yield tmp_schema
    tmp_schema.drop()
Exemplo n.º 27
0
"""
A simple, abstract schema to test relational algebra
"""
import random
import datajoint as dj
import itertools
import hashlib
import uuid
import faker

from . import PREFIX, CONN_INFO
import numpy as np
from datetime import date, timedelta

# Schema for the relational-algebra tests; locals() is passed as the
# declaration context (presumably so DataJoint resolves class names in this
# module — confirm against the DataJoint version in use).
schema = dj.Schema(PREFIX + "_relational",
                   locals(),
                   connection=dj.conn(**CONN_INFO))


@schema
class IJ(dj.Lookup):
    """3x3 grid of (i, j) pairs with j offset by 2, used to test restrictions."""
    definition = """  # tests restrictions
    i  : int
    j  : int
    """
    contents = [dict(i=i, j=j + 2) for i in range(3) for j in range(3)]


@schema
class JI(dj.Lookup):
    definition = """  # tests restrictions by relations when attributes are reordered
Exemplo n.º 28
0
import numpy as np
import datajoint as dj
from datajoint.blob import pack, unpack

from nose.tools import assert_equal, assert_true, assert_tuple_equal, assert_false
from numpy.testing import assert_array_equal

from . import PREFIX, CONN_INFO

# Schema for the blob pack/unpack tests; locals() is passed as the
# declaration context (presumably for class-name resolution — confirm
# against the DataJoint version in use).
schema = dj.Schema(PREFIX + "_test1",
                   locals(),
                   connection=dj.conn(**CONN_INFO))


@schema
class Blob(dj.Manual):
    """Holds diverse blob payloads for serialization round-trip tests."""
    definition = """  # diverse types of blobs
    id : int
    -----
    comment  :  varchar(255)
    blob  : longblob
    """


def insert_blobs():
    """
    This function inserts blobs resulting from the following datajoint-matlab code:

        self.insert({
             1  'simple string'    'character string'
             2  '1D vector'        1:15:180
Exemplo n.º 29
0
class TestTransactions:
    """
    test transaction management

    Covers: transaction-active flag, implicit rollback on exception inside
    a ``with`` transaction, and explicit ``cancel_transaction``.
    """

    schema = dj.Schema(
        PREFIX + "_transactions", locals(), connection=dj.conn(**CONN_INFO)
    )

    @schema
    class Subjects(dj.Manual):
        definition = """
        #Basic subject
        subject_id                  : int      # unique subject id
        ---
        real_id                     :  varchar(40)    #  real-world name
        species = "mouse"           : enum('mouse', 'monkey', 'human')   # species
        """

    @classmethod
    def setup_class(cls):
        # Shared relation instance and connection for all tests in the class.
        cls.relation = cls.Subjects()
        cls.conn = dj.conn(**CONN_INFO)

    def teardown(self):
        # Clear rows after each test without a confirmation prompt.
        self.relation.delete_quick()

    def test_active(self):
        """The in_transaction flag must be set inside a transaction block."""
        with self.conn.transaction as conn:
            assert_true(conn.in_transaction, "Transaction is not active")

    def test_transaction_rollback(self):
        """Test transaction cancellation using a with statement"""
        tmp = np.array(
            [(1, "Peter", "mouse"), (2, "Klara", "monkey")],
            self.relation.heading.as_dtype,
        )

        self.relation.delete()
        # First insert commits normally.
        with self.conn.transaction:
            self.relation.insert1(tmp[0])
        # Second insert is rolled back by the raised error.
        try:
            with self.conn.transaction:
                self.relation.insert1(tmp[1])
                raise DataJointError("Testing rollback")
        except DataJointError:
            pass
        assert_equal(
            len(self.relation),
            1,
            "Length is not 1. Expected because rollback should have happened.",
        )
        assert_equal(
            len(self.relation & "subject_id = 2"),
            0,
            "Length is not 0. Expected because rollback should have happened.",
        )

    def test_cancel(self):
        """Tests cancelling a transaction explicitly"""
        tmp = np.array(
            [(1, "Peter", "mouse"), (2, "Klara", "monkey")],
            self.relation.heading.as_dtype,
        )
        self.relation.delete_quick()
        self.relation.insert1(tmp[0])
        # Insert the second row inside a transaction, then cancel it.
        self.conn.start_transaction()
        self.relation.insert1(tmp[1])
        self.conn.cancel_transaction()
        assert_equal(
            len(self.relation),
            1,
            "Length is not 1. Expected because rollback should have happened.",
        )
        assert_equal(
            len(self.relation & "subject_id = 2"),
            0,
            "Length is not 0. Expected because rollback should have happened.",
        )
Exemplo n.º 30
0
"""Anatomy Tables
"""

import datajoint as dj
from loris.database.schema.base import ManualLookup, PRIMARY_NAME, COMMENTS
from loris.database.attributes import lookupname, tags

# All anatomy tables in this module live in the 'anatomy' schema.
schema = dj.Schema('anatomy')


@schema
class NeuronSection(ManualLookup, dj.Manual):
    """Lookup of neuron sections (e.g. dendrite, soma)."""
    primary_comment = 'section of a neuron - e.g. dendrite, soma'


@schema
class BrainArea(ManualLookup, dj.Manual):
    """Lookup of brain areas (e.g. medulla)."""
    primary_comment = 'brain area - e.g. medulla'


@schema
class CellType(dj.Manual):
    """Standard cell types with optional neurotransmitter/receptor tags."""
    definition = f"""
    {PRIMARY_NAME.format(name='cell_type', comment='standard cell type name - e.g. dm8')}
    ---
    neurotransmitters = null : <tags> # neurotransmitter of cell
    receptors = null : <tags> # receptors expressed by cell
    {COMMENTS}
    """