Example #1
File: maint.py Project: azraq27/padre
def sessions_identical(subj1, sess1, subj2, sess2):
    '''Tests the given sessions to make sure the datasets are the same'''
    dsets1 = [os.path.basename(str(x)) for x in subj1.dsets(session=sess1)]
    dsets2 = [os.path.basename(str(x)) for x in subj2.dsets(session=sess2)]
    dsets = list(set(dsets1 + dsets2))
    return_val = True
    with nl.notify('Comparing sessions %s.%s and %s.%s:' %
                   (subj1, sess1, subj2, sess2)):
        for dset in dsets:
            if not dsets_identical(
                    os.path.join(p.sessions_dir(subj1), sess1, dset),
                    os.path.join(p.sessions_dir(subj2), sess2, dset)):
                return_val = False
                continue
    return return_val
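A minimal usage sketch for the helper above, assuming the padre data store referenced as p in these snippets is configured and that sessions_identical, dsets_identical, and p.load are importable; the subject IDs and session name are hypothetical.

subj_a = p.load('SUBJ001')        # hypothetical subject records that share a session
subj_b = p.load('SUBJ001_dup')

if sessions_identical(subj_a, '20140101', subj_b, '20140101'):
    print('Same datasets in both sessions; one copy can be merged or removed.')
else:
    print('Datasets differ; inspect before merging.')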
Example #2
 def from_dict(cls,subject,session,dict_source):
     dset = Dset(subject,session,dict_source['filename'])
     dset.complete = dict_source['complete'] if 'complete' in dict_source else True
     dset.md5 = dict_source['md5'] if 'md5' in dict_source else None
     dset.meta = PrefixDict(dict_source['meta'])
     dset.meta.prefix = os.path.join(p.sessions_dir(subject),session) + '/'
     return dset
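A sketch of the dictionary shape from_dict reads, based on the keys used above: 'filename' is required, 'complete' and 'md5' fall back to defaults, and 'meta' becomes a PrefixDict resolved against the session directory. The file name, subject, and session below are placeholders, and the call assumes from_dict is exposed as a classmethod on Dset.

entry = {
    'filename': 'SUBJ001-1-3-anatomy.nii.gz',   # hypothetical dataset file name
    'complete': True,                           # optional; defaults to True
    'md5': None,                                # optional checksum string; defaults to None
    'meta': {'eprime': 'task_log.txt'},         # resolved relative to the session directory
}
dset = Dset.from_dict(subject, '20140101', entry)   # subject assumed already loaded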
Example #3
File: maint.py Project: azraq27/padre
def import_to_padre(subject_id,session,dsets,raw_data=[],dir_prefix=''):
    with commit_wrap():
        subj = create_subject(subject_id)
        try:
            new_session(subj,session)
        except SessionExists:
            pass
        session_dict = dict(subj._sessions[session])
        session_dict['unverified'] = True
        session_dict['date'] = datetime.datetime.strftime(nl.date_for_str(session),'%Y-%m-%d')
        for full_dset in sorted(dsets,key=lambda x:(int(os.path.basename(x).split('-')[1]),int(os.path.basename(x).split('-')[2]))):
            dset = {}
            dset['filename'] = os.path.basename(full_dset)
            if dset['filename'] not in [x.__str__(False) for x in subj.dsets(include_all=True)]:
                dset['md5'] = nl.hash(full_dset)
                dset['complete'] = True
                dset['meta'] = {}
                label = guess_label(dset['filename'].split('-')[3])
                if label not in session_dict['labels']:
                    session_dict['labels'][label] = []
                session_dict['labels'][label].append(dset)
                dset_fname = os.path.join(p.sessions_dir(subj),session,dset['filename'])
                if not os.path.exists(dset_fname):
                    shutil.move(full_dset,dset_fname)
        for raw in raw_data:
            try:
                shutil.move(os.path.join(dir_prefix,'raw',raw),p.raw_dir(subj))
            except:
                pass
        subj._sessions[session] = session_dict
        subj.save()
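A hedged sketch of calling import_to_padre. The sort key and guess_label call above imply dataset file names of the form <something>-<int>-<int>-<label>..., so the paths below follow that pattern but are invented, as are the subject ID, session name (assumed to be parseable by nl.date_for_str), and directory layout.

dsets = [
    '/incoming/SUBJ001-1-3-anatomy.nii.gz',   # fields 1 and 2 after '-' must be integers;
    '/incoming/SUBJ001-1-4-rest.nii.gz',      # field 3 is fed to guess_label()
]
import_to_padre('SUBJ001', '20140101', dsets,
                raw_data=['SUBJ001_raw.tgz'],   # moved from <dir_prefix>/raw/ into p.raw_dir
                dir_prefix='/incoming')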
Example #4
 def __init__(self,subject,session,dset_fname,label=None,complete=True,md5=None,meta={}):
     str.__init__(self)#,Dset.abspath(subject,session,dset_fname))
     self._dset_fname = dset_fname
     #: Whether this is a complete, usable dataset
     self.complete = complete
     #: dictionary of meta data associated with this dataset (e.g., ``eprime`` or ``eprime-txt``)
     self.meta = PrefixDict(meta)
     self.meta.prefix = os.path.join(p.sessions_dir(subject),session) + '/'
     #: md5 checksum of dataset file (used for checking for data corruption)
     self.md5 = md5
     
     #: Date this was acquired
     self.date = subject._sessions[session]['date'] if 'date' in subject._sessions[session] else None
     #: Experiment in which this was acquired
     self.experiment = subject._sessions[session]['experiment'] if 'experiment' in subject._sessions[session] else None
     #: Kind of dataset (e.g., ``anatomy``)
     self.label = label
     if self.label == None:
         try:
             for l in subject._sessions[session]['labels']:
                 for d in subject._sessions[session]['labels'][l]:
                     if d['filename'] == dset_fname:
                         self.label = l
                         raise StopIteration
         except StopIteration:
             pass
     #: Tags of session this was acquired during
     self.tags = subject._sessions[session]['tags'] if 'tags' in subject._sessions[session] else None
     #: Label of session this was acquired during
     self.session = session
     
     self._info = None
     self._subject = subject
     
     str.__init__(self)#,self.__abspath__())
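The comments above document the attributes a Dset carries; a short sketch of reading them off loaded datasets follows. The subject ID and session are placeholders, and subj.dsets(...) is the accessor used elsewhere in these snippets (e.g. Example #1).

subj = p.load('SUBJ001')                       # hypothetical subject
for dset in subj.dsets(session='20140101'):    # Dset instances for one session
    if dset.complete and dset.label == 'anatomy':
        print(dset)          # the string value is the dataset's path
        print(dset.md5)      # stored checksum, or None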
Example #5
File: maint.py Project: azraq27/padre
def merge_session(subj_from, subj_to, sess):
    if sess not in subj_to._sessions:
        subj_to._sessions[sess] = subj_from._sessions[sess]
    else:
        for k in subj_from._sessions[sess]:
            if k != 'labels':
                if k in subj_to._sessions[sess]:
                    merge_attr(subj_from._sessions[sess][k],
                               subj_to._sessions[sess][k])
                else:
                    subj_to._sessions[sess][k] = subj_from._sessions[sess][k]
        for label in subj_from._sessions[sess]['labels']:
            if label not in subj_to._sessions[sess]['labels']:
                subj_to._sessions[sess]['labels'][label] = []
            for dset in subj_from._sessions[sess]['labels'][label]:
                try:
                    to_i = dset_i(subj_to._sessions[sess]['labels'][label],
                                  dset['filename'])
                    subj_to._sessions[sess]['labels'][label][to_i] = dict(
                        dset.items() +
                        subj_to._sessions[sess]['labels'][label][to_i].items())
                except ValueError:
                    subj_to._sessions[sess]['labels'][label].append(dset)
    del (subj_from._sessions[sess])
    new_sess_dir = os.path.join(p.sessions_dir(subj_to), sess)
    from_sess_dir = os.path.join(p.sessions_dir(subj_from), sess)
    if not os.path.exists(new_sess_dir):
        os.makedirs(new_sess_dir)
    for r, ds, fs in os.walk(from_sess_dir):
        for f in fs:
            dset_f = os.path.join(new_sess_dir, f)
            if not os.path.exists(dset_f):
                os.rename(os.path.join(r, f), dset_f)
    if len(os.listdir(from_sess_dir)) == 0:
        os.rmdir(from_sess_dir)
    else:
        new_dir = os.path.join(
            p.trash_dir, '%s-%s-%s' %
            (subj_from, sess,
             datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S').format()))
        while os.path.exists(new_dir):
            new_dir += '_1'
        os.rename(from_sess_dir, new_dir)
    subj_from.save()
    subj_to.save()
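A sketch of merging a duplicate session between two subject records, for example after Example #1 shows the two copies hold the same datasets. The subject IDs are placeholders; merge_session merges the JSON entries, moves the session files into the destination subject's directory, renames any leftover source directory into p.trash_dir, and saves both subjects.

subj_dup = p.load('SUBJ001_dup')   # hypothetical source record
subj_main = p.load('SUBJ001')      # hypothetical destination record

merge_session(subj_dup, subj_main, '20140101')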
Example #6
 def init_directories(self):
     ''' create directories that these scripts expect on the disk '''
     for d in [
             p.subject_dir(self),
             p.raw_dir(self),
             p.sessions_dir(self)
         ]:
         if not os.path.exists(d):
             os.makedirs(d)   
Example #7
def _files_exist():
    '''make sure all the files that should be there, are still there'''
    for s in p.subjects():
        for dset in s.dsets:
            if not os.path.exists(dset):
                raise DatabaseConsistencyError("dataset is missing: %s" % dset)
        for sess in s.sessions:
            if 'scan_sheets' in sess:
                if not os.path.exists(os.path.join(p.sessions_dir(s),sess,sess['scan_sheets'])):
                    raise DatabaseConsistencyError("scan sheets PDF is missing: %s" % sess['scan_sheets'])
Example #8
File: maint.py Project: azraq27/padre
def delete_session(subj, session_name, purge=False):
    ''' delete a session
    
    By default, will only delete the references to the data within the JSON file.
    If ``purge`` is given as ``True``, then it will also delete the files from
    the disk (be careful!). ``purge`` will also automatically call ``save``.'''
    with commit_wrap():
        del (subj._sessions[session_name])
        if purge:
            session_dir = os.path.join(p.sessions_dir(subj), session_name)
            if os.path.exists(session_dir):
                shutil.rmtree(session_dir)
        subj.save()
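A sketch of the two deletion modes described in the docstring above; the subject ID and session names are placeholders.

subj = p.load('SUBJ001')                 # hypothetical subject

# Drop only the JSON entry; the files stay in the session directory on disk.
delete_session(subj, '20140101')

# Drop the entry and recursively remove the session directory (irreversible).
delete_session(subj, '20140102', purge=True)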
Example #9
File: maint.py Project: azraq27/padre
def new_session(subj, session_name):
    ''' create a new session
    
    Inserts the proper data structure into the JSON file, as well as creating
    the directory on disk.
    '''
    with commit_wrap():
        if session_name in subj._sessions:
            raise SessionExists
        session_dir = os.path.join(p.sessions_dir(subj), session_name)
        if not os.path.exists(session_dir):
            os.makedirs(session_dir)
        subj._sessions[session_name] = {'labels': {}}
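A sketch of creating a session and tolerating an existing one, mirroring what import_to_padre does in Example #3; the subject ID and session name are placeholders, and create_subject is the helper used there.

subj = create_subject('SUBJ001')     # loads the subject if it already exists
try:
    new_session(subj, '20140101')    # writes {'labels': {}} and creates the directory
except SessionExists:
    pass                             # session already recorded; reuse it
subj.save()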
Example #10
def save_session(subject_id,session):
    with p.maint.commit_wrap():
        subj = p.load(subject_id)
        subj._sessions[session]['date'] = parse(request.forms.get("date")).strftime("%Y-%m-%d")
        experiment = request.forms.get("experiment")
        if experiment=='none':
            experiment = None
        if experiment=='new':
            experiment = request.forms.get("new_experiment_text")
            p.subject.experiments.add(experiment)
        subj._sessions[session]['experiment'] = experiment
        tag = request.forms.get("new_tag")
        if tag and tag!='':
            p.subject.tags.add(tag)
            if 'tags' not in subj._sessions[session]:
                subj._sessions[session]['tags'] = []
            subj._sessions[session]['tags'].append(tag)
        scan_sheet = request.files.get("scan_sheet")
        if scan_sheet != None:
            subj._sessions[session]['scan_sheet'] = scan_sheet.filename
            scan_sheet.save(os.path.join(p.sessions_dir(subj),session))
        subj._sessions[session]['notes'] = request.forms.get("notes")
        subj._sessions[session]['include'] = True if request.forms.get("include") else False
        for dset in subj.dsets(session=session,include_all=True):
            dset_fname = dset.__str__(False)
            i = [x['filename'] for x in subj._sessions[session]['labels'][dset.label]].index(dset_fname)
            subj._sessions[session]['labels'][dset.label][i]['complete'] = True if request.forms.get('complete_%s'%dset_fname) else False
            dset.complete = subj._sessions[session]['labels'][dset.label][i]['complete']
            label = request.forms.get('label_%s' % dset_fname)
            if label:
                if dset.label!=label:
                    if label=='new':
                        label = request.forms.get('label_%s_new' % dset_fname)
                        p.subject.tasks.add(label)
                        if label not in subj._sessions[session]['labels']:
                            subj._sessions[session]['labels'][label] = []
                    del(subj._sessions[session]['labels'][dset.label][i])
                    if len(subj._sessions[session]['labels'][dset.label])==0:
                        del(subj._sessions[session]['labels'][dset.label])
                    if label not in subj._sessions[session]['labels']:
                        subj._sessions[session]['labels'][label] = []
                    subj._sessions[session]['labels'][label].append(dset.__dict__())
            add_meta = request.files.get('add_meta_%s' % dset_fname)
            if add_meta:
                meta_type = request.forms.get('meta_type_%s'%dset_fname)
                subj._sessions[session]['labels'][dset.label][i]['meta'][meta_type] = add_meta.filename
                try:
                    add_meta.save(os.path.join(p.sessions_dir(subj),session))
                except IOError:
                    pass
        if 'unverified' in subj._sessions[session]:
            del(subj._sessions[session]['unverified'])
        subj.save()
        p.subject._all_subjects[str(subj)] = subj
        new_subj_id = request.forms.get("new_subject_id")
        if new_subj_id and new_subj_id!='':
            # create_subject will load the subject if it already exists...
            new_subj = p.maint.create_subject(new_subj_id)
            if new_subj:
                p.maint.merge_session(subj,new_subj,session)
                subj.save()
                new_subj.save()
                p.subject._index_one_subject(subj)
                p.subject._index_one_subject(new_subj)
                subj = new_subj
        else:
            p.subject._index_one_subject(subj)
    redirect('/edit_subject/%s/%s' % (subj,session))
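save_session reads its inputs from request.forms / request.files and ends with redirect(...), which looks like a Bottle-style handler; a hedged sketch of wiring it up follows. The route pattern and server settings are assumptions, not taken from the project.

# Assumes the Bottle framework; the URL pattern here is illustrative only.
from bottle import post, run

post('/save_session/<subject_id>/<session>')(save_session)

# run(host='localhost', port=8080)   # start the editing interface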
Example #11
 def abspath(cls,_subj=None,_sess=None,_fname=None):
     '''return the absolute path to an arbitrary file'''
     return os.path.join(p.sessions_dir(_subj),_sess,_fname)
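A small sketch of the path this helper builds; the commented-out call in Example #4 suggests abspath is exposed as a classmethod on Dset. The subject object, session, and file name are placeholders, and the result depends on how p.sessions_dir is configured.

# e.g. <sessions_dir(subject)>/20140101/SUBJ001-1-3-anatomy.nii.gz
full_path = Dset.abspath(subject, '20140101', 'SUBJ001-1-3-anatomy.nii.gz')
print(full_path)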
Example #12
File: maint.py Project: azraq27/padre
def synchronize_to_disk(subj,
                        add_missing=True,
                        delete_duplicates=True,
                        delete_missing=True):
    '''Will try to clean up a subject's JSON file based on what files actually exist on disk
    
    :add_missing:           adds new JSON entries for files that exist on disk but aren't listed
    :delete_duplicates:     delete duplicate JSON entries if they refer to the same file
    :delete_missing:        delete JSON entries that have no file on disk'''
    def dset_in_dict(fname, l):
        return len(
            [x for x in nl.flatten(l.values()) if fname == x['filename']]) > 1

    with nl.notify('Trying to clean up subject %s' % subj):
        s = p.load(subj)
        if s == None:
            nl.notify('Error: couldn\'t load subject %s!' % subj,
                      level=nl.level.error)
            return False
        with commit_wrap():
            sess_on_disk = os.listdir(os.path.join(p.sessions_dir(s)))
            sess_extra_JSON = list(set(s._sessions.keys()) - set(sess_on_disk))
            sess_extra_disk = list(set(sess_on_disk) - set(s._sessions.keys()))
            if len(sess_extra_disk) > 0:
                if add_missing:
                    for sess in sess_extra_disk:
                        nl.notify('Creating missing session %s' % sess,
                                  level=nl.level.warning)
                        new_session(s, sess)
                else:
                    nl.notify(
                        'Warning: found sessions on disk with no entries: %s' %
                        (' '.join(sess_extra_disk)),
                        level=nl.level.warning)
            if len(sess_extra_JSON) > 0:
                if delete_missing:
                    for sess in sess_extra_JSON:
                        nl.notify('Removing session %s (missing from disk)' %
                                  sess,
                                  level=nl.level.warning)
                        del (s._sessions[sess])
                else:
                    nl.notify('Warning: found sessions missing from disk: %s' %
                              (' '.join(sess_extra_disk)),
                              level=nl.level.warning)
            for sess in s._sessions:
                with nl.notify('Checking session "%s"...' % sess):
                    new_sess = {}
                    for fname in os.listdir(
                            os.path.join(p.sessions_dir(s), sess)):
                        if nl.is_dset(fname):
                            res = s._index_of_dset_named(fname, sess)
                            if res:
                                # At least one copy of this in the old session
                                (_, label, i) = res
                                nl.notify('Found %s' % fname)
                                if label not in new_sess:
                                    new_sess[label] = []
                                new_sess[label].append(
                                    s._sessions[sess]['labels'][label][i])
                                del (s._sessions[sess]['labels'][label][i])
                            else:
                                # File on disk, but no entry
                                if add_missing:
                                    nl.notify('Adding new entry for file %s' %
                                              fname,
                                              level=nl.level.warning)
                                    dset = {}
                                    dset['filename'] = fname
                                    full_dset = os.path.join(
                                        p.sessions_dir(s), sess, fname)
                                    dset['md5'] = nl.hash(full_dset)
                                    dset['complete'] = True
                                    dset['meta'] = {}
                                    if 'unsorted' not in new_sess:
                                        new_sess['unsorted'] = []
                                    new_sess['unsorted'].append(dset)
                    for label in s._sessions[sess]['labels']:
                        if len(s._sessions[sess]['labels'][label]) > 0:
                            # Leftover entries that have no file
                            for dset in s._sessions[sess]['labels'][label]:
                                if dset_in_dict(dset['filename'], new_sess):
                                    # Already have seen this dataset somewhere...
                                    if delete_duplicates:
                                        nl.notify(
                                            'Deleting duplicate entry for file %s'
                                            % dset['filename'],
                                            level=nl.level.warning)
                                    else:
                                        nl.notify(
                                            'Warning: found duplicate entry for file %s (leaving in place)'
                                            % dset['filename'],
                                            level=nl.level.warning)
                                        new_sess[label].append(dset)
                                else:
                                    # Entry in JSON, but no file on disk
                                    if delete_missing:
                                        nl.notify(
                                            'Deleting missing dataset %s (no corresponding file on disk)'
                                            % dset['filename'],
                                            level=nl.level.warning)
                                    else:
                                        nl.notify(
                                            'Warning: found entry for %s, but no corresponding file on disk (leaving empty entry in place)'
                                            % dset['filename'],
                                            level=nl.level.warning)
                                        new_sess[label].append(dset)
                    s._sessions[sess]['labels'] = new_sess
            s.save()
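A sketch of the cleanup call with the flags documented above; the subject ID is a placeholder. The second call shows a more conservative pass that only warns about mismatches instead of rewriting entries.

# Full cleanup: add entries for stray files, drop duplicate and file-less entries.
synchronize_to_disk('SUBJ001')

# Conservative pass: report problems but leave the existing JSON entries in place.
synchronize_to_disk('SUBJ001',
                    add_missing=False,
                    delete_duplicates=False,
                    delete_missing=False)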
Example #13
        if 'tags' not in sess:
            sess['tags'] = []
        sess['tags'] += args.tag

    if args.date:
        nl.notify('Setting date to %s' % args.date)
        sess['date'] = args.date

    if args.verify:
        sess['unverified'] = False
    else:
        sess['unverified'] = True

    subj.save()

sess_dir = os.path.join(p.sessions_dir(subj), args.session)
if args.scan_sheet:
    if os.path.exists(args.scan_sheet):
        with p.maint.commit_wrap():
            try:
                shutil.copy(args.scan_sheet, sess_dir)
                sess['scan_sheets'] = os.path.basename(args.scan_sheet)
            except:
                pass

if args.dsets:
    for i in xrange(len(args.dsets)):
        with p.maint.commit_wrap():
            dset = args.dsets[i]
            try:
                meta = args.meta[i]
Example #14
        for t in args.tag:
            if t not in sess['tags']:
                sess['tags'].append(t)

    if args.date:
        nl.notify('Setting date to %s' % args.date)
        sess['date'] = args.date

    if args.verify:
        sess['unverified'] = False
    else:
        sess['unverified'] = True

    subj.save()

sess_dir = os.path.join(p.sessions_dir(subj),args.session)
if args.scan_sheet:
    if os.path.exists(args.scan_sheet):
        with p.maint.commit_wrap():
            try:
                shutil.copy(args.scan_sheet,sess_dir)
                sess['scan_sheets'] = os.path.basename(args.scan_sheet)
            except:
                pass

if args.dsets:
    for i in xrange(len(args.dsets)):
        with p.maint.commit_wrap():
            dset = args.dsets[i]
            try:
                meta = args.meta[i]