Example #1
def start_connection():
    """
    Starts up the DataJoint database connection and signs in with user and password; returns the schema.
    """

    if dj.config['database.user'] != "root":
        try:
            dj.config['database.host'] = ip
        except Exception as e:
            print("Could not connect to database: ", e)
            return None

        dj.config['database.user'] = '******'
        dj.config['database.password'] = '******' if sys.platform != 'darwin' else 'fede'
        dj.config['database.safemode'] = True
        dj.config['safemode'] = True

        dj.config["enable_python_native_blobs"] = True

        dj.conn()

    try:
        schema = dj.schema(dbname)
    except Exception as e:
        raise ValueError(
            f'\n\nFailed to connect; if on Windows, make sure the MySQL57 service is running.\n{e}'
        )

    return schema
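These snippets all share one shape: populate dj.config, call dj.conn(), then bind a schema on that connection. A minimal sketch of the pattern (host, credentials, and schema name are placeholders, not values from any example):

import datajoint as dj

dj.config['database.host'] = '127.0.0.1'    # placeholder host
dj.config['database.user'] = 'someuser'     # placeholder credentials
dj.config['database.password'] = 'somepass'
dj.conn()                                   # connect using the config above
schema = dj.schema('some_database')         # bind a schema on that connection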
Example #2
def start_connection():
    """
    Starts up the DataJoint database connection and signs in with user and password; returns the database name and schema.
    
    docker compose yaml file:
    D:\Dropbox (UCL - SWC)\Rotation_vte\mysql-server\docker-compose.yml

    Data are here:
    D:\Dropbox (UCL - SWC)\Rotation_vte\mysql-server\data\Database

    """
    if have_dj:
        dbname = "LOCOMOTION"  # Name of the database subfolder with data
        if dj.config["database.user"] != "root":

            dj.config["database.host"] = ip
            dj.config["database.user"] = "******"
            dj.config["database.password"] = psw
            dj.config["database.safemode"] = True
            dj.config["safemode"] = False
            dj.config["enable_python_native_blobs"] = True

            dj.conn()

        schema = dj.schema(dbname)
        return dbname, schema
    else:
        return None, None
Example #3
def start_connection():
    """
    Starts up the DataJoint database connection and signs in with user and password; returns the schema.
    """
    try:
        if dj.config['database.user'] != "root":
            try:
                dj.config['database.host'] = ip
            except Exception as e:
                print("Could not connect to database: ", e)
                return None

            dj.config['database.user'] = '******'
            dj.config['database.password'] = '******'
            dj.config['database.safemode'] = True
            dj.config['safemode'] = True

            dj.config["enable_python_native_blobs"] = True

            dj.conn()

        schema = dj.schema(dbname)
    except Exception as e:
        raise ValueError(
            f"Failed to start server; make sure you've launched docker-compose from M/mysql-server.\n{e}"
        )
    return schema
Example #4
def test_reject_insecure():
    dj.conn(
        CONN_INFO['host'],
        user='******',
        password='******',
        use_tls=False,
        reset=True).query("SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1]
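As excerpted, test_reject_insecure asserts nothing; in context the suite presumably expects the insecure connection attempt to fail. A hedged pytest-style sketch of that intent (CONN_INFO and a TLS-requiring account are assumptions):

import pytest
import datajoint as dj

def test_reject_insecure_sketch():
    # assuming the test account is configured to require TLS
    with pytest.raises(Exception):
        dj.conn(CONN_INFO['host'], user='someuser', password='somepass',
                use_tls=False, reset=True)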
Example #5
def set_datajoint_config(jwt_payload: dict):
    """
    Method to set credentials for database
    :param jwt_payload: Dictionary containing databaseAddress, username and password
        strings
    :type jwt_payload: dict
    """
    dj.config['database.host'] = jwt_payload['databaseAddress']
    dj.config['database.user'] = jwt_payload['username']
    dj.config['database.password'] = jwt_payload['password']
    dj.conn(reset=True)
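A possible call site, assuming the payload comes from a decoded JWT with exactly the keys the docstring names (values here are placeholders):

jwt_payload = {
    'databaseAddress': 'db.example.org',    # placeholder address
    'username': 'someuser',                 # placeholder credentials
    'password': 'somepass',
}
set_datajoint_config(jwt_payload)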
Example #6
def _update_one_session(key):
    log.info('\n======================================================')
    log.info('Waveform update for key: {k}'.format(k=key))

    #
    # Find Ephys Recording
    #
    key = (experiment.Session & key).fetch1()
    sinfo = ((lab.WaterRestriction
              * lab.Subject.proj()
              * experiment.Session.proj(..., '-session_time')) & key).fetch1()

    rigpaths = get_ephys_paths()
    h2o = sinfo['water_restriction_number']

    sess_time = (datetime.min + key['session_time']).time()
    sess_datetime = datetime.combine(key['session_date'], sess_time)

    for rigpath in rigpaths:
        dpath, dglob = _get_sess_dir(rigpath, h2o, sess_datetime)
        if dpath is not None:
            break

    if dpath is not None:
        log.info('Found session folder: {}'.format(dpath))
    else:
        log.warning('Error - No session folder found for {}/{}. Skipping...'.format(h2o, key['session_date']))
        return False

    try:
        clustering_files = _match_probe_to_ephys(h2o, dpath, dglob)
    except FileNotFoundError as e:
        log.warning(str(e) + '. Skipping...')
        return False

    with ephys.Unit.connection.transaction:
        for probe_no, (f, cluster_method, npx_meta) in clustering_files.items():
            try:
                log.info('------ Start loading clustering results for probe: {} ------'.format(probe_no))
                loader = cluster_loader_map[cluster_method]
                dj.conn().ping()
                _add_spike_sites_and_depths(loader(sinfo, *f), probe_no, npx_meta, rigpath)
            except (ProbeInsertionError, ClusterMetricError, FileNotFoundError) as e:
                dj.conn().cancel_transaction()  # either successful fix of all probes, or none at all
                if isinstance(e, ProbeInsertionError):
                    log.warning('Probe Insertion Error: \n{}. \nSkipping...'.format(str(e)))
                else:
                    log.warning('Error: {}'.format(str(e)))
                return False

        with dj.config(safemode=False):
            (ephys.UnitCellType & key).delete()

    return True
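Note the dj.conn().ping() before each probe load: it keeps the MySQL connection alive across long-running work inside an open transaction. The pattern in isolation (work_items and process are hypothetical):

conn = dj.conn()
with conn.transaction:
    for item in work_items:   # hypothetical iterable of load jobs
        conn.ping()           # keep the server connection from timing out
        process(item)         # hypothetical per-item loader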
Example #7
    def conn(self, *args, **kwargs):
        """connect to database with hostname, username, and password.
        """
        newargs = dict(zip(['host', 'user', 'password'], args))
        newargs.update(kwargs)
        already_connected = 'connection' in self
        # self.datajoint_configuration()
        self.connect_ssh()
        reconfigure_dj = False
        # database host
        if self['database.host'] is None and 'host' not in newargs and not already_connected:
            host = input("What is the host address for your MySQL "
                         "instance (defaults to `127.0.0.1`)? ")
            if not host:
                host = '127.0.0.1'
            self['database.host'] = host
            reconfigure_dj = True
        # database port
        if self['database.port'] is None and 'port' not in newargs and not already_connected:
            port = input("What is the port for your MySQL "
                         "instance (defaults to `3306`)? ")
            if not port:
                port = 3306
            else:
                port = int(port)
            self['database.port'] = port
            reconfigure_dj = True
        if self['database.user'] is None and 'user' not in newargs and not already_connected:
            user = input("Please enter your Loris/MySQL username: ")
            self['database.user'] = user
            reconfigure_dj = True
        # database password (block reconstructed; the scrape masked the credential prompt)
        if self['database.password'] is None and 'password' not in newargs and not already_connected:
            pw = getpass.getpass("Please enter Loris/MySQL password: ")
            self['database.password'] = pw
            reconfigure_dj = True
            reconfigure_dj = True

        if reconfigure_dj:
            self.datajoint_configuration()

        if self['database.host'] == 'mysql' and 'host' not in newargs:
            try:
                self['connection'] = dj.conn(**newargs)
            except pymysql.OperationalError:
                newargs.pop('host')
                self['connection'] = dj.conn('127.0.0.1', **newargs)
        else:
            self['connection'] = dj.conn(**newargs)

        # update backup context with schemata
        if newargs.get('refresh', False):
            self.refresh_schema()
        dj.config['backup_context'].update(self['schemata'])
        return self['connection']
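Example #8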
def check_password(self, password):
    # check password in mysql database
    try:
        dj.conn(
            None, self.user_name, password,
            reset=True
        )
        success = True
    except Exception:
        success = False
    config.conn(reset=True)
    return success
Example #9
def test_persistent_dj_conn():
    """
    conn() method should provide persistent connection across calls.
    Setting reset=True should create a new persistent connection.
    """
    c1 = dj.conn(**CONN_INFO)
    c2 = dj.conn()
    c3 = dj.conn(**CONN_INFO)
    c4 = dj.conn(reset=True, **CONN_INFO)
    c5 = dj.conn(**CONN_INFO)
    assert_true(c1 is c2)
    assert_true(c1 is c3)
    assert_true(c1 is not c4)
    assert_true(c4 is c5)
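The test documents dj.conn()'s caching contract; a sketch of the semantics it implies (not DataJoint's actual implementation):

_cached = None

def conn_sketch(reset=False, **kwargs):
    """Return a cached connection; rebuild it only when reset=True."""
    global _cached
    if _cached is None or reset:
        _cached = make_new_connection(**kwargs)   # hypothetical constructor
    return _cached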
Example #11
def render_page_contents(n_clicks, user, password, current_contents):

    if n_clicks:
        dj.config['database.host'] = '127.0.0.1'
        dj.config['database.user'] = user
        dj.config['database.password'] = password

        try:
            dj.conn(reset=True).connect()
            return [tabs] + ['Connected']
        except Exception as e:
            return [current_contents] + [f'Connection failed: {str(e)}']
    else:
        return [current_contents] + ['Not connected']
Example #12
def undo_amplitude_scaling():

    amp_scale = 1 / 3.01

    units2fix = ephys.Unit & FixedAmpUnit  # only fix those units that underwent fix_0007
    units2fix = units2fix - (UndoFixedAmpUnit & 'fixed=1'
                             )  # exclude those that were already fixed

    if not units2fix:
        return

    # safety check, no jrclust results and no npx 1.0
    assert len(units2fix & 'clustering_method LIKE "jrclust%"') == 0
    assert len(units2fix.proj() * ephys.ProbeInsertion
               & 'probe_type LIKE "neuropixels 1.0%"') == 0

    fix_hist_key = {
        'fix_name': pathlib.Path(__file__).name,
        'fix_timestamp': datetime.now()
    }
    FixHistory.insert1(fix_hist_key)

    for unit in tqdm(units2fix.proj('unit_amp').fetch(as_dict=True)):
        amp = unit.pop('unit_amp')
        with dj.conn().transaction:
            (ephys.Unit & unit)._update('unit_amp', amp * amp_scale)
            FixedAmpUnit.insert1({
                **fix_hist_key,
                **unit, 'fixed': True,
                'scale': amp_scale
            })

    # delete cluster_quality figures and remake figures with updated unit_amp
    with dj.config(safemode=False):
        (report.ProbeLevelReport & units2fix).delete()
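The fix scripts here and below share a two-step shape: update and log inside one transaction, then delete derived reports with the safemode prompt suppressed. Schematically (SomeTable, FixLog, and DerivedReport are hypothetical stand-ins):

with dj.conn().transaction:                 # commit update and log entry together
    (SomeTable & key)._update('attr', new_value)
    FixLog.insert1({**key, 'fixed': True})

with dj.config(safemode=False):             # skip the interactive delete prompt
    (DerivedReport & key).delete()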
Example #13
def my_checkpoint(nnfabrik):
    """
    Clones the connection from given nnfabrik to complete Checkpoint transaction within a make.
    """
    conn_clone = clone_conn(dj.conn())
    schema_clone = CustomSchema(nnfabrik.schema.database,
                                connection=conn_clone)

    @schema_clone
    class TransferredCheckpoint(dj.Manual):
        storage = "minio"

        @property
        def definition(self):
            definition = """
            # Checkpoint table
            -> nnfabrik.Trainer
            -> nnfabrik.Dataset
            -> nnfabrik.Model
            -> nnfabrik.Seed
            collapsed_history:                 varchar(64)      # transfer         
            transfer_step:                     int              # transfer         
            data_transfer:                     bool             # flag if we do data transfer
            epoch:                             int              # epoch of creation
            ---
            score:                             float            # current score at epoch
            state:                             attach@{storage} # current state
            ->[nullable] nnfabrik.Fabrikant
            trainedmodel_ts=CURRENT_TIMESTAMP: timestamp        # UTC timestamp at time of insertion
            """.format(storage=self.storage)
            return definition

    return TransferredCheckpoint
Example #14
def my_checkpoint(nnfabrik):
    conn_clone = clone_conn(dj.conn())
    schema_clone = CustomSchema(nnfabrik.schema.database, connection=conn_clone)

    @schema_clone
    class Checkpoint(dj.Manual):
        storage = "minio"

        @property
        def definition(self):
            definition = f"""
            # Checkpoint table
            -> nnfabrik.Trainer
            -> nnfabrik.Dataset
            -> nnfabrik.Model
            -> nnfabrik.Seed
            epoch:                             int          # epoch of creation
            ---
            score:                             float        # current score at epoch
            state:                             attach@{self.storage}  # current state
            ->[nullable] nnfabrik.Fabrikant
            trainedmodel_ts=CURRENT_TIMESTAMP: timestamp    # UTC timestamp at time of insertion
            """
            return definition

    return Checkpoint
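Example #15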
def test_overlapping_name():
    test_schema = dj.schema(PREFIX + '_overlapping_schema', connection=dj.conn(**CONN_INFO))

    @test_schema
    class Unit(dj.Manual):
        definition = """
        id:  int     # simple id
        """

    # hack to update the locals dictionary
    locals()

    @test_schema
    class Cell(dj.Manual):
        definition = """
        type:  varchar(32)    # type of cell
        """

        class Unit(dj.Part):
            definition = """
            -> master
            -> Unit
            """

    test_schema.drop()
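Example #16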
def test_drop_database():
    schema = dj.schema(PREFIX + '_drop_test',
                       connection=dj.conn(reset=True, **CONN_INFO))
    assert_true(schema.exists)
    schema.drop()
    assert_false(schema.exists)
    schema.drop()  # should do nothing
Example #17
def conn(self, *args, **kwargs):
    """connect to database with hostname, username, and password.
    """
    self.datajoint_configuration()
    self.connect_ssh()
    self['connection'] = dj.conn(*args, **kwargs)
    return self['connection']
Example #18
def update_photostim_event_time():
    """
    Update photostim event time for Susu's sessions where behavior was recorded on Rig3 ("Rig3" appears in the behavior file name).
    For these sessions, photostimulation is late-delay, i.e. photostim onset is 0.5 seconds prior to the response period (go-cue).
    """
    for session_key in (experiment.Session & 'username = "******"').fetch('KEY'):
        behav_fname = (behav_ingest.BehaviorIngest.BehaviorFile
                       & session_key).fetch1('behavior_file')
        rig_name = re.search(r'Recording(Rig\d)_', behav_fname)
        if rig_name is None:
            log.warning(
                'No rig-info in behavior file ({}) for session: {}. Skipping...'
                .format(behav_fname, session_key))
            continue

        rig_name = rig_name.groups()[0]
        log.info('Found rig-name: {} from behavior file ({})'.format(
            rig_name, behav_fname))

        if rig_name == "Rig3":
            log.info(
                'Matching "RecordingRig3", proceed with updating photostim onset'
            )
            with dj.conn().transaction:
                for trial_key in (experiment.PhotostimTrial
                                  & session_key).fetch('KEY',
                                                       order_by='trial'):
                    # get go-cue, compute photostim onset
                    go_cue_time = (
                        experiment.TrialEvent & trial_key
                        & 'trial_event_type = "go"').fetch1('trial_event_time')
                    photostim_onset = float(go_cue_time) - 0.5
                    # update
                    (experiment.PhotostimEvent & trial_key)._update(
                        'photostim_event_time', photostim_onset)
Example #19
def connection():
    dj.config['safemode'] = False
    connection = dj.conn(host=getenv('TEST_DB_SERVER'),
                         user=getenv('TEST_DB_USER'),
                         password=getenv('TEST_DB_PASS'), reset=True)
    schema1 = dj.Schema('schema1', connection=connection)
    @schema1
    class TableA(dj.Manual):
        definition = """
        id: int
        ---
        name: varchar(30)
        """

    schema2 = dj.Schema('schema2', connection=connection)
    @schema2
    class TableB(dj.Manual):
        definition = """
        id: int
        ---
        number: float
        """
    yield connection
    schema1.drop()
    schema2.drop()
    connection.close()
    dj.config['safemode'] = True
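In pytest, everything after yield runs at teardown, so this fixture both provisions the schemas and drops them. A test consumes it by name (a minimal assumed example):

def test_connection_alive(connection):
    # the fixture-provided connection should be usable inside the test body
    assert connection.is_connected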
Example #21
def undo_bitvolt_scaling(insertion_keys={}):
    """
    This is a one-time operation only - Oct 2020
    """

    units2fix = ephys.Unit * ephys.ClusteringLabel & insertion_keys & 'quality_control = 1'  # only on QC results
    units2fix = units2fix - (UndoBitVoltScalingAmpUnit & 'fixed=1')  # exclude those that were already fixed

    if not units2fix:
        return

    # safety check, no jrclust results
    assert len(units2fix & 'clustering_method LIKE "jrclust%"') == 0

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}
    FixHistory.insert1(fix_hist_key)

    for unit in tqdm(units2fix.proj('unit_amp').fetch(as_dict=True)):
        probe_type = (ephys.ProbeInsertion & unit).fetch1('probe_type')
        bit_volts = npx_bit_volts[re.match(r'neuropixels (\d.0)', probe_type).group()]

        amp = unit.pop('unit_amp')
        with dj.conn().transaction:
            (ephys.Unit & unit)._update('unit_amp', amp * 1/bit_volts)
            UndoBitVoltScalingAmpUnit.insert1({**fix_hist_key, **unit, 'fixed': True, 'scale': 1/bit_volts})

    # delete cluster_quality figures and remake figures with updated unit_amp
    with dj.config(safemode=False):
        (report.ProbeLevelReport & units2fix).delete()
Example #22
def apply_amplitude_scaling(insertion_keys={}):
    """
    This fix is identical to that of fix_0007 - apply an amplitude scaling (3.01) to npx 2.0 probe units.
    The difference is that this fix only applies the scaling to the mean waveform, not to unit_amp.
    """

    amp_scale = 3.01

    npx2_inserts = ephys.ProbeInsertion & insertion_keys & 'probe_type LIKE "neuropixels 2.0%"'

    units2fix = ephys.Unit * ephys.ClusteringLabel & npx2_inserts.proj() & 'quality_control = 1'
    units2fix = units2fix - (FixedAmpWfUnit & 'fixed=1')  # exclude those that were already fixed

    if not units2fix:
        return

    # safety check, no jrclust results
    assert len(units2fix & 'clustering_method LIKE "jrclust%"') == 0

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}
    FixHistory.insert1(fix_hist_key)

    for unit in tqdm(units2fix.proj('waveform').fetch(as_dict=True)):
        wf = unit.pop('waveform')
        with dj.conn().transaction:
            (ephys.Unit & unit)._update('waveform', wf * amp_scale)
            FixedAmpWfUnit.insert1({**fix_hist_key, **unit, 'fixed': True, 'scale': amp_scale})
Example #23
    def load_ccf_annotation(cls):
        """
        Load the CCF r3 20 µm dataset.
        Requires that dj.config['ccf.r3_20um_path'] be set to the location
        of the CCF Annotation tif stack.
        """
        log.info('CCFAnnotation.load_ccf_annotation(): start')

        version_name = dj.config['custom']['ccf_data_paths']['version_name']
        stack_path = dj.config['custom']['ccf_data_paths']['annotation_tif']

        stack = imread(stack_path)  # load reference stack

        log.info('.. loaded stack of shape {} from {}'.format(
            stack.shape, stack_path))

        # iterate over ccf ontology region id/name records,
        regions = get_ontology_regions()
        chunksz, ib_args = 50000, {
            'skip_duplicates': True,
            'allow_direct_insert': True
        }

        for idx, (region_id, r) in enumerate(regions.iterrows()):

            region_id = int(region_id)

            log.info('.. loading region {} ({}/{}) ({})'.format(
                region_id, idx, len(regions), r.region_name))

            # extracting filled volumes from stack in scaled [[x,y,z]] shape,
            vol = np.array(np.where(stack == region_id)).T[:, [2, 1, 0]] * 20

            if not vol.shape[0]:
                log.info('.. region {} volume: shape {} - skipping'.format(
                    region_id, vol.shape))
                continue

            log.info('.. region {} volume: shape {}'.format(
                region_id, vol.shape))

            with dj.conn().transaction:
                with InsertBuffer(CCF, chunksz, **ib_args) as buf:
                    for vox in vol:
                        buf.insert1((CCFLabel.CCF_R3_20UM_ID, *vox))
                        buf.flush()

                with InsertBuffer(cls, chunksz, **ib_args) as buf:
                    for vox in vol:
                        buf.insert1({
                            'ccf_label_id': CCFLabel.CCF_R3_20UM_ID,
                            'ccf_x': vox[0],
                            'ccf_y': vox[1],
                            'ccf_z': vox[2],
                            'annotation_version': version_name,
                            'annotation': r.region_name
                        })
                        buf.flush()

        log.info('.. done.')
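InsertBuffer batches rows so each INSERT statement carries up to chunksz tuples. The same batching without the helper, using plain DataJoint inserts (a sketch over the vol array from the loader above):

CHUNK = 50000
rows = []
for vox in vol:
    rows.append((CCFLabel.CCF_R3_20UM_ID, *vox))
    if len(rows) >= CHUNK:
        CCF.insert(rows, skip_duplicates=True, allow_direct_insert=True)
        rows.clear()
if rows:
    CCF.insert(rows, skip_duplicates=True, allow_direct_insert=True)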
Example #24
def virtual_module():
    dj.config['safemode'] = False
    connection = dj.conn(host=getenv('TEST_DB_SERVER'),
                         user=getenv('TEST_DB_USER'),
                         password=getenv('TEST_DB_PASS'),
                         reset=True)
    schema = dj.Schema('filter')

    @schema
    class Student(dj.Lookup):
        definition = """
        student_id: int
        ---
        student_name: varchar(50)
        student_ssn: varchar(20)
        student_enroll_date: datetime
        student_balance: float
        student_parking_lot=null : varchar(20)
        student_out_of_state: bool
        """
        contents = [(i, faker.name(), faker.ssn(),
                     faker.date_between_dates(date_start=date(2021, 1, 1),
                                              date_end=date(2021, 1, 31)),
                     round(randint(1000, 3000),
                           2), choice([None, 'LotA', 'LotB',
                                       'LotC']), bool(getrandbits(1)))
                    for i in range(100)]

    yield dj.VirtualModule('filter', 'filter')
    schema.drop()
    connection.close()
    dj.config['safemode'] = True
Example #25
def extend_electrode_config_name():

    conn = dj.conn()

    lab_db = lab.schema.database
    ephys_db = ephys.schema.database
    hist_db = histology.schema.database

    # ephys.Unit.table_name
    fixes = {
        lab_db: [
            lab.ElectrodeConfig, lab.ElectrodeConfig.Electrode,
            lab.ElectrodeConfig.ElectrodeGroup
        ],
        ephys_db: [ephys.ProbeInsertion, ephys.LFP.Channel, ephys.Unit],
        hist_db: [
            histology.ElectrodeCCFPosition.ElectrodePosition,
            histology.ElectrodeCCFPosition.ElectrodePositionError,
            histology.EphysCharacteristic
        ]
    }

    with conn.transaction:
        for schema in [lab, ephys, histology]:
            for tbl in fixes[schema.schema.database]:

                q_str = '''
                        alter table `{}`.`{}`
                        modify `electrode_config_name` varchar(64) NOT NULL
                        comment 'user friendly name'
                        '''.format(schema.schema.database, tbl.table_name)

                log.warning('electrode_config_name `{}`.`{}`'.format(
                    schema.schema.database, tbl.table_name))
                res = conn.query(q_str)
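Example #26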
def teardown_package():
    """
    Package-level unit test teardown.
    Removes all databases with name starting with PREFIX.
    To deal with possible foreign key constraints, it will unset
    and then later reset FOREIGN_KEY_CHECKS flag
    """
    conn = dj.conn(**CONN_INFO)
    conn.query('SET FOREIGN_KEY_CHECKS=0')
    cur = conn.query('SHOW DATABASES LIKE "{}\_%%"'.format(PREFIX))
    for db in cur.fetchall():
        conn.query('DROP DATABASE `{}`'.format(db[0]))
    conn.query('SET FOREIGN_KEY_CHECKS=1')
    if os.path.exists("dj_local_conf.json"):
        remove("dj_local_conf.json")

    # Remove old S3
    objs = list(minioClient.list_objects_v2(S3_MIGRATE_BUCKET, recursive=True))
    objs = [
        minioClient.remove_object(S3_MIGRATE_BUCKET,
                                  o.object_name.encode('utf-8')) for o in objs
    ]
    minioClient.remove_bucket(S3_MIGRATE_BUCKET)

    # Remove S3
    objs = list(
        minioClient.list_objects_v2(S3_CONN_INFO['bucket'], recursive=True))
    objs = [
        minioClient.remove_object(S3_CONN_INFO['bucket'],
                                  o.object_name.encode('utf-8')) for o in objs
    ]
    minioClient.remove_bucket(S3_CONN_INFO['bucket'])

    # Remove old File Content
    shutil.rmtree(str(Path(os.path.expanduser('~'), 'temp')))
Example #27
    def test_settings_table():
        dj.config['database.host'] = '127.0.0.1'
        dj.config['database.user'] = '******'
        dj.config['database.password'] = '******'
        dj.config['enable_python_native_blobs'] = True
        dj.config['enable_python_pickle_blobs'] = True
        dj.config['enable_automakers'] = True
        dj.config['tmp_folder'] = '.'
        schema = dj.schema('tutorial')
        dj.conn()

        @schema
        class ManualEntry(dj.Manual):
            definition = """
           id : smallint
           ---
           help : longblob
           """

        @schema
        class ImportedEntry(dj.AutoImported):
            definition = """
           -> ManualEntry
           ---
           data = null : longblob
           """

            def make_compatible(self, data):

                return {'data': data}

        try:
            ImportedEntry.settings_table.insert1({
                'settings_name': 'mean1',
                'func': np.mean,
                'global_settings': {
                    'keepdims': True
                },
                'entry_settings': {
                    'a': 'help'
                }
            })
        except Exception:
            pass
        finally:
            df = ImportedEntry.settings_table.fetch(format='frame')
            assert_true(list(df.index) == ['mean1'])
Example #28
    def _attempt_login(database_address: str, username: str, password: str):
        """
        Attempts to authenticate against database with given username and address.

        :param database_address: Address of database
        :type database_address: str
        :param username: Username of user
        :type username: str
        :param password: Password of user
        :type password: str
        """
        dj.config["database.host"] = database_address
        dj.config["database.user"] = username
        dj.config["database.password"] = password

        # Attempt to connect; raises an exception if authentication fails
        dj.conn(reset=True)
Example #29
def connection():
    dj.config['safemode'] = False
    connection = dj.conn(host=getenv('TEST_DB_SERVER'),
                         user=getenv('TEST_DB_USER'),
                         password=getenv('TEST_DB_PASS'), reset=True)
    yield connection
    dj.config['safemode'] = True
    connection.close()
Example #30
    def attempt_login(database_address: str, username: str, password: str):
        """
        Attempts to authenticate against database with given username and address
        :param database_address: Address of database
        :type database_address: str
        :param username: Username of user
        :type username: str
        :param password: Password of user
        :type password: str
        :return: Dictionary with keys: result(True|False), and error (if applicable)
        :rtype: dict
        """
        dj.config['database.host'] = database_address
        dj.config['database.user'] = username
        dj.config['database.password'] = password

        # Attempt to connect; raises on failure, otherwise report success
        dj.conn(reset=True)
        return dict(result=True)
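The docstring promises an error key on failure, but the body only covers the success path, since dj.conn(reset=True) raises on bad credentials. A variant honoring the full contract might look like this (a sketch, not the project's actual code):

def attempt_login_checked(database_address: str, username: str, password: str):
    dj.config['database.host'] = database_address
    dj.config['database.user'] = username
    dj.config['database.password'] = password
    try:
        dj.conn(reset=True)   # raises on bad host or credentials
        return dict(result=True)
    except Exception as e:
        return dict(result=False, error=str(e))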
Example #31
    def load_ccf_r3_20um(cls):
        """
        Load the CCF r3 20 µm dataset.
        Requires that dj.config['ccf.r3_20um_path'] be set to the location
        of the CCF Annotation tif stack.
        """
        # TODO: scaling
        log.info('CCFAnnotation.load_ccf_r3_20um(): start')

        self = cls()  # Instantiate self,
        stack_path = dj.config['custom']['ccf.r3_20um_path']
        stack = imread(stack_path)  # load reference stack,

        log.info('.. loaded stack of shape {} from {}'.format(
            stack.shape, stack_path))

        # iterate over ccf ontology region id/name records,
        regions = self.get_ccf_r3_20um_ontology_regions()
        region, nregions = 0, len(regions)
        chunksz, ib_args = 50000, {
            'skip_duplicates': True,
            'allow_direct_insert': True
        }

        for num, txt in regions:

            region += 1
            num = int(num)

            log.info('.. loading region {} ({}/{}) ({})'.format(
                num, region, nregions, txt))

            # extracting filled volumes from stack in scaled [[x,y,z]] shape,
            vol = np.array(np.where(stack == num)).T[:, [2, 1, 0]] * 20

            if not vol.shape[0]:
                log.info('.. region {} volume: shape {} - skipping'.format(
                    num, vol.shape))
                continue

            log.info('.. region {} volume: shape {}'.format(num, vol.shape))

            with dj.conn().transaction:
                with InsertBuffer(CCF, chunksz, **ib_args) as buf:
                    for vox in vol:
                        buf.insert1((CCFLabel.CCF_R3_20UM_ID, *vox))
                        buf.flush()

                with InsertBuffer(cls, chunksz, **ib_args) as buf:
                    for vox in vol:
                        buf.insert1((CCFLabel.CCF_R3_20UM_ID, *vox,
                                     CCFLabel.CCF_R3_20UM_TYPE, txt))
                        buf.flush()

        log.info('.. done.')
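Example #32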
    def test_autocomputed():
        dj.config['database.host'] = '127.0.0.1'
        dj.config['database.user'] = '******'
        dj.config['database.password'] = '******'
        dj.config['enable_python_native_blobs'] = True
        dj.config['enable_python_pickle_blobs'] = True
        dj.config['enable_automakers'] = True
        dj.config['tmp_folder'] = '.'
        schema = dj.schema('tutorial')
        dj.conn()

        @schema
        class Multi3(dj.AutoComputed):
            definition = """
            id : smallint
            ---
            help : longblob
            help2 : longblob
            help3 : longblob
            help4 = null : longblob
            """

        try:
            Multi3.insert([{
                'id': 9,
                'help': '12341234',
                'help2': 12354,
                'help3': 'asdfksajdfljk',
                'help4': 'asdf'
            }])
        except Exception as e:
            assert_true((isinstance(e, dj.errors.DataJointError)))
            assert_true(np.shape(Multi3().fetch())[0] == 0)
        if (np.shape(Multi3.settings_table.fetch())[0] < 1):
            Multi3.settings_table.insert1({
                'settings_name': 'std1',
                'func': np.std,
                'global_settings': {
                    'keepdims': False
                }
            })
        assert_true(np.shape(Multi3().fetch())[0] == 0)
Example #33
def teardown_package():
    """
    Package-level unit test teardown.
    Removes all databases with name starting with PREFIX.
    To deal with possible foreign key constraints, it will unset
    and then later reset FOREIGN_KEY_CHECKS flag
    """
    conn = dj.conn(**CONN_INFO)
    conn.query("SET FOREIGN_KEY_CHECKS=0")
    cur = conn.query('SHOW DATABASES LIKE "{}\_%%"'.format(PREFIX))
    for db in cur.fetchall():
        conn.query("DROP DATABASE `{}`".format(db[0]))
    conn.query("SET FOREIGN_KEY_CHECKS=1")
def test_unauthorized_database():
    """
    an attempt to create a database to which user has no privileges should raise an informative exception.
    """
    dj.schema('unauthorized_schema', connection=dj.conn(**CONN_INFO))
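Example #35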
def __init__(self):
    self.relation = self.Subjects()
    self.conn = dj.conn(**CONN_INFO)
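Example #36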
"""
A simple, abstract schema to test relational algebra
"""
import random
import datajoint as dj
import itertools

from . import PREFIX, CONN_INFO
import numpy as np

schema = dj.schema(PREFIX + '_relational', locals(), connection=dj.conn(**CONN_INFO))


@schema
class IJ(dj.Lookup):
    definition = """  # tests restrictions
    i  : int
    j  : int
    """
    contents = list(dict(i=i, j=j+2) for i in range(3) for j in range(3))


@schema
class JI(dj.Lookup):
    definition = """  # tests restrictions by relations when attributes are reordered
    j  : int
    i  : int
    """
    contents = list(dict(i=i+1, j=j) for i in range(3) for j in range(3))

Example #37
# And how I run the file in an IPython Notebook
import datajoint as dj
import os

c = dj.conn()

%run sandbox.py

from sandbox import Basic
basic = Basic()
comp = Dependent()

basic.insert1({'exp_date':'2015-11-24','path':'example/path'})

comp.populate()

# For me it yields the following error:
# InternalError: (1630, "FUNCTION datetime.date does not exist. Check the 'Function Name Parsing and Resolution' section in the Reference Manual")
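Example #38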
"""
a schema for testing external attributes
"""

import tempfile
import datajoint as dj

from . import PREFIX, CONN_INFO
import numpy as np

schema = dj.schema(PREFIX + '_extern', connection=dj.conn(**CONN_INFO))


dj.config['external'] = {
    'protocol': 'file',
    'location': 'dj-store/external'}

dj.config['external-raw'] = {
    'protocol': 'file',
    'location': 'dj-store/raw'}

dj.config['external-compute'] = {
    'protocol': 's3',
    'location': '/datajoint-projects/test',
    'user': '******',
    'token': '2e05709792545ce'}

dj.config['cache'] = tempfile.mkdtemp('dj-cache')


@schema
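Example #39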
import datajoint as dj
from . import PREFIX, CONN_INFO

schema = dj.schema(PREFIX + '_advanced', locals(), connection=dj.conn(**CONN_INFO))


@schema
class Person(dj.Manual):
    definition = """
    person_id : int
    ----
    full_name : varchar(60)
    sex : enum('M','F')
    """

    def fill(self):
        """
        fill fake names from www.fakenamegenerator.com
        """
        self.insert((
            (0, "May K. Hall", "F"),
            (1, "Jeffrey E. Gillen", "M"),
            (2, "Hanna R. Walters", "F"),
            (3, "Russel S. James", "M"),
            (4, "Robbin J. Fletcher", "F"),
            (5, "Wade J. Sullivan", "M"),
            (6, "Dorothy J. Chen", "F"),
            (7, "Michael L. Kowalewski", "M"),
            (8, "Kimberly J. Stringer", "F"),
            (9, "Mark G. Hair", "M"),
            (10, "Mary R. Thompson", "F"),
def test_drop_database():
    schema = dj.schema(PREFIX + "_drop_test", locals(), connection=dj.conn(**CONN_INFO))
    assert_true(schema.exists)
    schema.drop()
    assert_false(schema.exists)
    schema.drop()  # should do nothing
Example #41
"""
Sample schema with realistic tables for testing
"""

import random
import numpy as np
import datajoint as dj
import os, signal
from . import PREFIX, CONN_INFO

schema = dj.schema(PREFIX + '_test1', connection=dj.conn(**CONN_INFO))


@schema
class Test(dj.Lookup):
    definition = """
    key   :   int     # key
    ---
    value   :   int     # value
    """
    contents = [(k, 2*k) for k in range(10)]


@schema
class TestExtra(dj.Manual):
    """
    clone of Test but with an extra field
    """
    definition = Test.definition + "\nextra : int # extra int\n"

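Example #42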
def test_virtual_module():
    module = dj.create_virtual_module('module', schema.schema.database, connection=dj.conn(**CONN_INFO))
    assert_true(issubclass(module.Experiment, UserTable))
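Example #43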
def test_dj_conn():
    """
    Should be able to establish a connection
    """
    c = dj.conn(**CONN_INFO)
    assert c.is_connected
Example #44
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 10 22:01:52 2014

@author: eywalker
"""

import datajoint as dj

conn = dj.conn()
conn.bind(__name__, 'aperture_problem')

class Example(dj.Relvar):
    """
    aperture.Example (manual) # an example table
    -> aperture.Subjects
    id             :int unsigned       # unique subject identifier number
    -----
    phrase        :varchar(10)        # Initials of the subject
    power='N'      :ENUM('M','F','N')  # Gender of the subject, Male, Female, or Not-specified
    -> aperture.Exp1ShapeAxes
    """

class Subjects(dj.Relvar):
    """
    aperture.Subjects (manual) # my \
    newest table
    subject_id              :int unsigned       # unique subject identifier number
    -----
    subject_initials        :varchar(10)        # Initials of the subject
    subject_gender='N'      :ENUM('M','F','N')  # Gender of the subject, Male, Female, or Not-specified
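Example #45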
from . import PREFIX, CONN_INFO
import datajoint as dj
from nose.tools import assert_true


schema = dj.schema(PREFIX + '_keywords', locals(), connection=dj.conn(**CONN_INFO))


class A(dj.Manual):
    definition = """
    a_id: int   # a id
    """


class B(dj.Manual):
    source = None
    definition = """
    -> self.source
    b_id: int   # b id
    """

    class H(dj.Part):
        definition = """
        -> master
        name: varchar(128)  # name
        """

    class C(dj.Part):
        definition = """
        -> master
        -> master.H
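Example #46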
def setup_class(cls):
    cls.relation = cls.Subjects()
    cls.conn = dj.conn(**CONN_INFO)
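Example #47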
def test_repr():
    c1 = dj.conn(**CONN_INFO)
    assert_true('disconnected' not in repr(c1) and 'connected' in repr(c1))
Example #48
import numpy as np
from nose.tools import assert_true
import datajoint as dj
from . import PREFIX, CONN_INFO

schema = dj.schema(PREFIX + '_nantest', locals(), connection=dj.conn(**CONN_INFO))


@schema
class NanTest(dj.Manual):
    definition = """
    id :int
    ---
    value=null :double
    """


class TestNaNInsert:
    @classmethod
    def setup_class(cls):
        cls.rel = NanTest()
        with dj.config(safemode=False):
            cls.rel.delete()
        a = np.array([0, 1/3, np.nan, np.pi, np.nan])
        cls.rel.insert(((i, value) for i, value in enumerate(a)))
        cls.a = a

    def test_insert_nan(self):
        """Test fetching of null values"""
        b = self.rel.fetch('value', order_by='id')
        assert_true((np.isnan(self.a) == np.isnan(b)).all(),
def setup(self):
    self.conn = dj.conn(reset=True, **CONN_INFO)
import datajoint as dj
import numpy as np

from . import PREFIX, CONN_INFO
from numpy.testing import assert_array_equal


schema_in = dj.schema(PREFIX + '_test_bypass_serialization_in',
                      connection=dj.conn(**CONN_INFO))

schema_out = dj.schema(PREFIX + '_test_blob_bypass_serialization_out',
                       connection=dj.conn(**CONN_INFO))


test_blob = np.array([1, 2, 3])


@schema_in
class InputTable(dj.Lookup):
    definition = """
    id:                 int
    ---
    data:               blob
    """
    contents = [(0, test_blob)]


@schema_out
class OutputTable(dj.Manual):
    definition = """
    id:                 int