Example #1
0
def blackfynn_cli():
    """Entry point for the ``bf`` command-line tool.

    Parses docopt arguments, handles the commands that need no
    authenticated client ('help' and 'profile'), then constructs a
    Blackfynn client for ``--profile``/``--dataset`` and dispatches to
    the ``bf_<command>`` submodule implementing the requested command.
    Exits with an error message on any invalid profile, dataset, or
    command.
    """
    from importlib import import_module

    args = docopt(__doc__,
                  version='bf version {}'.format(blackfynn.__version__),
                  options_first=True)

    command = args['<command>']

    # Test for these two commands first as they
    # do not require a Blackfynn client
    if command in ['help', None]:
        print(__doc__.strip('\n'))
        return

    if command == 'profile':
        from . import bf_profile
        bf_profile.main()
        return

    # Display warning message if config.ini is not found.
    # A bare Settings() loads environment variables and defaults only.
    settings = Settings()
    if not os.path.exists(settings.config_file):
        print(
            "\033[31m* Warning: No config file found, run 'bf profile' to start the setup assistant\033[0m"
        )

    # Try to use profile specified by --profile, exit if invalid
    try:
        bf = Blackfynn(args['--profile'])
    except Exception as e:
        exit(e)

    # Try to use dataset specified by --dataset, exit if invalid
    try:
        if args['--dataset'] is not None:
            dataset = bf.get_dataset(args['--dataset'])
            set_working_dataset(dataset.id)
    except Exception as e:
        exit(e)

    # Map each user-facing command (and its aliases) to the sibling
    # submodule implementing it; every submodule exposes main(bf).
    command_modules = {
        'status': 'bf_status',
        'use': 'bf_use',
        'init': 'bf_init',
        'datasets': 'bf_datasets',
        'ds': 'bf_datasets',
        'organizations': 'bf_organizations',
        'orgs': 'bf_organizations',
        'share': 'bf_share',
        'unshare': 'bf_share',
        'collaborators': 'bf_share',
        'cache': 'bf_cache',
        'create': 'bf_create',
        'delete': 'bf_delete',
        'move': 'bf_move',
        'rename': 'bf_rename',
        'props': 'bf_props',
        'get': 'bf_get',
        'where': 'bf_where',
        'upload': 'bf_upload',
        'append': 'bf_append',
        'search': 'bf_search',
    }
    if command not in command_modules:
        exit("Invalid command: '{}'\nSee 'bf help' for available commands".
             format(command))
    # Import lazily, exactly as the original elif chain did, so unused
    # command modules are never loaded.
    module = import_module('.' + command_modules[command], __package__)
    module.main(bf)
Example #2
0
def convert_ncs2bfts(dataFolder,
                     resultFolder=None,
                     bfFileName=None,
                     dsName=None,
                     fs=None):
    """Convert a folder of Neuralynx .ncs files into one .bfts CSV and
    optionally upload it to a Blackfynn dataset.

        param dataFolder: folder containing ncs files
        param resultFolder: folder to contain results (default: cwd)
        param bfFileName: name to save generated bfts file
            (default: <dataFolder basename>.bfts)
        param dsName: name (or id) of dataset to upload to (default: skip upload)
        param fs: target sampling rate in Hz; required when conversion runs
        return: None
        raises ValueError: if a conversion is needed but fs is missing or
            dataFolder contains no .ncs files
    """
    dataFolder = os.path.abspath(dataFolder)
    if not bfFileName:
        bfFileName = os.path.basename(dataFolder) + '.bfts'
    if resultFolder:
        resultFolder = os.path.abspath(resultFolder)
        resultFile = os.path.join(resultFolder, bfFileName)
    else:
        resultFile = bfFileName

    if os.path.isfile(resultFile):
        # Already converted; fall through to the (optional) upload step.
        print(bfFileName, 'exists')
    else:
        if fs is None:
            # Previously this crashed later with an opaque TypeError in the
            # resampling division; fail fast with a clear message instead.
            raise ValueError('fs (target sampling rate) is required to convert')
        print('Converting to', bfFileName, '...')
        chls = OrderedDict()  # dictionary to store channel values
        ncs = None
        for chFile in os.listdir(dataFolder):
            if chFile.endswith('ncs'):
                ncs = ncs2py.load_ncs(
                    os.path.join(dataFolder, chFile)
                )  # import neuralynx data. NOTE: import stores information as a dictionary
                rawData = ncs['data']
                # Resample from the native rate to the requested fs.
                rawData = resample_poly(rawData, 1.0,
                                        ncs['sampling_rate'] / fs)
                chls.update({'ch' + chFile.split('.')[0][3:]: rawData})
        if ncs is None:
            # Without this guard the header parsing below raised NameError.
            raise ValueError('no .ncs files found in {}'.format(dataFolder))

        # Recover the absolute recording start (usec since epoch) from the
        # raw header of the last channel read; channels are assumed to share
        # the same TimeCreated -- TODO confirm.
        TimeCreated = [
            line.strip() for line in ncs['raw_header'].split('\r\n')
            if line != '' if 'TimeCreated' == line.strip()[1:12]
        ][0]
        TimeCreated = ncs2py.parse_neuralynx_time_string(TimeCreated)
        TimeCreated = (TimeCreated -
                       datetime(1970, 1, 1)).total_seconds() * (1e6)
        timeVec = ncs['timestamp'] + TimeCreated
        timeVec = arange(timeVec[0],
                         timeVec[-1], (1.0 / fs) * (1e6),
                         dtype=int64)
        sampleSize = timeVec.shape[0]

        # Truncate all channels to the common timestamp vector and persist.
        df = DataFrame(chls)[0:sampleSize]
        df.insert(0, 'timeStamp', timeVec)
        df.to_csv(resultFile, index=False)

    if dsName:
        bf = Blackfynn()
        ds = bf.get_dataset(dsName)
        # Platform item names drop the '.bfts' extension (last 5 chars).
        if os.path.basename(resultFile)[:-5] in ds.get_items_names():
            print(bfFileName, 'uploaded')
        else:
            print('uploading', bfFileName, 'to Blackfynn...')
            ds.upload(resultFile)
Example #3
0
            printf("metadata with key, %s, added to %s\n", key, meta_path)
        else:
            try:
                ds.remove_property(key, CATEGORY)
            except:
                printf("metadata with key, %s, does not exist in %s.\n", key,
                       meta_path)
                sys.exit()

            printf("metadata with key, %s, removed from %s.\n", key, meta_path)


###############################################################################
# program starts HERE
bf = Blackfynn()  # use 'default' profile
# Global flags/state consumed by the argument-parsing and metadata code
# elsewhere in this script (not all of it is visible in this chunk).
ALL = False
ADD = True
CATEGORY = 'Blackfynn'  # default category
DATASET = False
FILE = False
KEY = False
METAFILE = False
PATH = False
SHOW = False
TYPE = None
VALUE = False
meta_path = ""

if len(sys.argv) < 2:
    # No arguments: print usage. printf/syntax are helpers defined
    # elsewhere in this file. NOTE(review): there is no exit() here, so
    # execution falls through after printing usage -- confirm intended.
    printf("%s\n", syntax())
from blackfynn import Blackfynn
import glob
import os
import time
import sys
# Pick the network-share root for this platform.
if sys.platform == 'win32':
    rootPath = 'R:\\'
else:
    rootPath = os.path.join('//', 'media', 'rnelshare')

# Make the project's analysis helpers importable from the share.
sys.path.append(
    os.path.join(rootPath, 'users', 'amn69', 'Projects', 'cat', 'selectivity',
                 'surface paper', 'v2018'))
import helperFcns as hf

bf = Blackfynn('lumbar_selectivity')
catDS = bf.get_dataset('N:dataset:1cc6b671-0dea-4aab-ad30-ed3884e17028')

db = hf.db
collection = db.blackfynnUpload  # tracks which subjects were uploaded

for iSub in ['HA04'
             ]:  #['Electro','Freeze','HA02','HA04','Galactus','Hobgoblin']:
    print iSub
    if collection.find({'subject': iSub}).count() == 0:
        subjFolder = catDS.create_collection(iSub)
    else:
        subjFolder = catDS.get_items_by_name(iSub)[0]

    targetSessions = sorted(hf.PWbySession[iSub].keys())
    for iSesh in targetSessions:
Example #5
0
#!/usr/bin/env python3
'''
Find line lengths at certain times, in order to figure out threshold values
for the basic line length detector.

Usage: python -m helper_scripts.lineLengthTest ptName [startTime]
'''
import os
import sys
import numpy as np
from blackfynn import Blackfynn

from settings import CHANNELS, LL_CLIP_LENGTH, TS_IDs

# Look up the patient's timeseries and channels from the settings tables.
ptName = sys.argv[1]
bf = Blackfynn()
ts = bf.get(TS_IDs[ptName])
ch = CHANNELS.get(ptName, None)

# Optional second CLI argument: start time in usec.
# The original wrapped both the parse and the segments() call in a bare
# except, which swallowed KeyboardInterrupt/SystemExit and masked real API
# errors; parse narrowly, then query once.
try:
    startTime = int(sys.argv[2])
except (IndexError, ValueError):
    startTime = None

if startTime is None:
    segments = ts.segments()
else:
    segments = ts.segments(start=startTime)

# Snap the reported start time to the first available segment.
startTime = segments[0][0]
print('start time:', startTime)


def lineLength(clip):
    lengths = np.zeros(clip.shape[0]).astype('float64')
 def _getBlackfynn(self, profile_name):
     """Return a Blackfynn client authenticated with the credentials stored
     under *profile_name* in self._settings, caching it on self._bf.
     """
     creds = self._settings[profile_name]
     # SECURITY: the previous version printed api_token/api_secret to
     # stdout; credentials must never be logged.
     self._bf = Blackfynn(api_token=creds['api_token'],
                          api_secret=creds['api_secret'])
     return self._bf
Example #7
0
 def __init__(self, api_token=None, api_secret=None, host=None, streaming_host=None):
     """Wrap a Blackfynn client built from explicit credentials (no profile)."""
     self.client = Blackfynn(
         profile=None,
         api_token=api_token,
         api_secret=api_secret,
         host=host,
         streaming_host=streaming_host,
     )
Example #8
0
def bf():
    """Build and return a Blackfynn client from the default profile."""
    client = Blackfynn()
    return client
Example #9
0
from blackfynn import Blackfynn
from datetime import datetime, timedelta
import scipy.io as sio
import sys

# Positional CLI arguments: credentials plus timeseries/patient/seizure info.
username = sys.argv[1]
passwd = sys.argv[2]
tsid = sys.argv[3]  # NOTE(review): unused below -- ts comes from datasets()[0]
ptname = sys.argv[4]
szfile = sys.argv[5]  # file of alternating seizure start/end times (uUTC)
#def pullSzClips(username, passwd, tsid, ptname, szfile):

# open up the BF dataset (again, this works for R951 but does not generalize)
# If you were to use a MEF file, you could point to it here
bf = Blackfynn(email=username,password=passwd)
bf.set_context('Mayo')
ts = bf.datasets()[0].items[0]

# get the annotation times in uUTC from file
with open(szfile) as f:
 	times = f.read().splitlines()
for i in range(0,len(times),2):
    	tempStart = datetime.utcfromtimestamp(float(times[i])/1e6)
    	tempEnd = datetime.utcfromtimestamp(float(times[i+1])/1e6)
    
    	print(str(tempStart) + "until" + str(tempEnd))
	# pull the data for each seizure
    	try:
        	print('about to pull')
    		df = ts.get_data(start=tempStart,end=tempEnd)
Example #10
0
    while toContinue:
        for i in range(1, df.shape[0]):
            toContinue = False
            #print(i)
            deltaT = (df.iloc[i].name - df.iloc[i - 1].name).microseconds
            if deltaT > 10000:
                dfs.append(df.iloc[:i])
                df = df.iloc[i:]
                toContinue = True
                break
    dfs.append(df)
    return dfs


# Need to fill in personal Blackfynn login information here
bf = Blackfynn(email='you_email_here', password='******')
bf.set_context('Mayo')
ts = bf.datasets()[0].items[
    0]  # again, this is set up for 951 now. Need to change this line for a different BF dataset
# Here we grab the last 30 minutes of data
tempEnd = datetime.datetime.now()
tempStart = tempEnd - datetime.timedelta(0, 1800)

d = ts.get_data(start=tempStart, end=tempEnd)
d = d.drop_duplicates(
)  # had some issues with duplicate data points. Remove them if they exist
if d.shape[0] == 0:
    print('no data in time segment...quitting \n')
    exit()
print('starting split...')
dfs = split_data(d)  # split into gap-free chunks (split_data defined above)
Example #11
0
def connect_to_blackfynn():
    """Initialize the module-level ``bf`` client from the app Config."""
    global bf
    bf = Blackfynn(
        api_token=Config.BLACKFYNN_API_TOKEN,
        api_secret=Config.BLACKFYNN_API_SECRET,
        env_override=False,
        host=Config.BLACKFYNN_API_HOST,
    )
Example #12
0
def test_exception_raise():
    """A request for a nonexistent dataset should surface the server error."""
    client = Blackfynn()
    with pytest.raises(Exception) as err:
        client._api._call("get", "/datasets/plop")
    assert "plop not found" in str(err.value)
Example #13
0
def test_client_host_overrides():
    """Overriding the host makes authentication fail during construction."""
    bogus_host = 'http://localhost'
    # Blackfynn.__init__ authenticates immediately, so merely constructing
    # the client against an unreachable host raises.
    with pytest.raises(requests.exceptions.RequestException):
        Blackfynn(host=bogus_host)
import click
import sparc_dash
from blackfynn import Blackfynn

# Module-level client and dataset shared by every CLI command below.
bf = Blackfynn('sparc-consortium')
ds = bf.get_dataset('SPARC Datasets')


@click.group()
def cli():
    # Root click group for the dashboard tool. Deliberately no docstring:
    # click would surface it as help text. NOTE(review): no add_command
    # calls are visible in this chunk -- commands are presumably registered
    # elsewhere; confirm.
    pass


@click.command()
def clear():
    # Delegates to sparc_dash.clearRecords, which presumably removes the
    # dashboard records from ds -- confirm against sparc_dash.
    print('Clearing')
    out = sparc_dash.clearRecords(ds)  # NOTE(review): return value unused


@click.command()
def create_models():
    """Run sparc_dash.create_models against the SPARC dataset."""
    # Replaces the copy-pasted placeholder docstring ("Example script.").
    out = sparc_dash.create_models(ds)  # NOTE(review): return value unused


@click.command()
def update():
    """Run sparc_dash.update with the module-level client and dataset."""
    # Replaces the copy-pasted placeholder docstring ("Example script.").
    out = sparc_dash.update(bf, ds)  # NOTE(review): return value unused

Example #15
0
def lineLength(ptName,
               startTime=None,
               endTime=None,
               append=False,
               layerName=LL_MA_LAYER_NAME):
    '''
    Runs the line length detector over a patient's Timeseries and writes
    "Possible seizure" annotations where a short window's line length
    exceeds a moving-average threshold.

    ptName: patient name; key into the module-level config tables
        (LL_LONG_WINDOWS, LL_SHORT_WINDOWS, FREQs, CHANNELS, TS_IDs,
        LL_MA_THRESHOLDS)
    startTime: time (usec) to start from. Default value (None) starts from the beginning
    endTime: time (usec) to stop at. Default value (None) stops at the end
    append: Whether to append to (or otherwise overwrite) the line length annotation layer
    layerName: name of layer to write to

    Returns: endTime (the end of the last full long-term window)
    '''
    # NOTE(review): these globals are presumably read by _trend() and other
    # helpers outside this function -- confirm before localizing them.
    global shortWindow, longWindow, freq, ch
    longWindow = LL_LONG_WINDOWS.get(ptName, LL_LONG_WINDOW_DEFAULT)
    shortWindow = LL_SHORT_WINDOWS.get(ptName, LL_SHORT_WINDOW_DEFAULT)
    freq = FREQs.get(ptName, DEFAULT_FREQ)
    ch = CHANNELS.get(ptName, None)

    bf = Blackfynn()
    ts = bf.get(TS_IDs[ptName])

    # Make sure startTime and endTime are valid: out-of-range values are
    # clamped to the Timeseries bounds or cause an early return.
    if startTime is not None:
        if startTime < ts.start:
            print(
                'Warning: startTime', startTime,
                'is before the beginning of the Timeseries. Starting from the beginning...'
            )
            startTime = None
        elif startTime > ts.end:
            print('Warning: startTime', startTime,
                  'is after the end of the Timeseries. Exiting...')
            return ts.end

    if endTime is not None:
        if endTime > ts.end:
            print(
                'Warning: endTime', endTime,
                'is after the end of the Timeseries. Stopping at the end...')
            endTime = None
        elif endTime < ts.start:
            print('Warning: endTime', endTime,
                  'is before the beginning the Timeseries. Exiting...')
            return ts.start

    # Get/create annotation layer
    try:
        layer = ts.get_layer(layerName)
        if append:
            print("Appending to layer '%s'" % layerName)
        else:
            print("Overwriting layer '%s'" % layerName)
            layer.delete()
            layer = ts.add_layer(layerName)
    except:
        # NOTE(review): bare except -- presumably triggered when the layer
        # doesn't exist yet, but it also hides unrelated failures.
        print("Creating layer '%s'" % layerName)
        layer = ts.add_layer(layerName)

    # Find the long-term windows to start and end from
    windowStart = ts.start
    windowEnd = windowStart + longWindow
    if startTime:
        # Advance window-by-window to the window containing startTime.
        while windowEnd < startTime:
            windowStart = windowEnd
            windowEnd += longWindow
    else:
        startTime = ts.start
    if not endTime:
        endTime = ts.end

    # Make sure segments list starts at startTime and ends at endTime
    segments = ts.segments(startTime, endTime + 1)
    if not segments:
        print('No data found between %d and %d.' % (startTime, endTime), \
              ' Exiting...')
        return endTime
    startTime = max(segments[0][0], startTime)
    print('start time:', startTime)
    segments[0] = (startTime, segments[0][1])

    endTime = min(segments[-1][1], endTime)
    print('end time:', endTime)
    segments[-1] = (segments[-1][0], endTime)

    # Go through each long-term window
    while windowStart < endTime and windowEnd <= ts.end:
        # Calculate trend and threshold
        try:
            trend, shortClips = _trend(ts, windowStart, windowEnd)
        except RequestException:
            # Workaround in case of server errors: retry the same window.
            sleep(2)
            continue
        if trend is None:
            print('skipping long window (no clips)...')
            sys.stdout.flush()
            windowStart = windowEnd
            windowEnd += longWindow
            continue

        # If using a custom start time, trim shortClips
        # (This should only ever happen with the 1st long-term window)
        if windowStart < startTime:
            try:
                # Drop clips that end at or before startTime.
                i = next(i for i, c in enumerate(shortClips)
                         if c['end'] > startTime)
                shortClips[:i] = []
            except StopIteration:
                pass
            shortClips[0]['start'] = max(shortClips[0]['start'], startTime)
            # Delete 1st clip if it's too short (less than half of shortWindow):
            if shortClips[0]['end'] - shortClips[0]['start'] < shortWindow / 2:
                shortClips.pop(0)

        # Do the same thing with endTime
        # (Should only ever happen to the last long-term window)
        if windowEnd > endTime:
            # NOTE(review): 'l' shadows the builtin and is easy to misread.
            l = len(shortClips)
            try:
                # Index of the last clip starting before endTime.
                i = next(l - 1 - i for i, c in enumerate(reversed(shortClips))
                         if c['start'] < endTime)
                shortClips[i + 1:] = []
            except StopIteration:
                pass
            # Delete last clip if it's not long enough
            lastClip = shortClips.pop(-1)
            if lastClip['end'] - lastClip['start'] >= shortWindow / 2:
                lastClip['end'] = min(lastClip['end'], endTime)
                shortClips.append(lastClip)
            else:
                # NOTE(review): dead branch -- the popped clip is discarded.
                pass

        # Annotate and/or print predictions
        threshold = LL_MA_THRESHOLDS[ptName] * trend
        for clip in shortClips:
            l = clip['length']
            if l > threshold:
                print('+ %f (%d, %d)' % (l, clip['start'], clip['end']))
                layer.insert_annotation('Possible seizure',
                                        start=clip['start'],
                                        end=clip['end'])
            else:
                print('- %f (%d, %d)' % (l, clip['start'], clip['end']))
            sys.stdout.flush()

        # Go to next long term window
        windowStart = windowEnd
        windowEnd += longWindow
    return min(windowStart, endTime)