Example #1
File: files.py Project: kousu/openneuro
def commit_files(store,
                 dataset,
                 files,
                 name=None,
                 email=None,
                 validate=True,
                 cookies=None):
    """
    Commit a list of files with the email and name provided.

    Returns the commit hash generated.
    """
    ds = store.get_dataset(dataset)
    with CommitInfo(ds, name, email):
        if files:
            for filename in files:
                ds.add(filename)
        else:
            # If no list of paths, add all untracked files
            ds.add('.')
    ref = ds.repo.get_hexsha()
    if validate:
        # Run the validator but don't block on the request
        queue = dataset_queue(dataset)
        validate_dataset.s(dataset, ds.path, ref,
                           cookies).apply_async(queue=queue)
    return ref
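A hypothetical invocation might look as follows; the `store` object, the accession number, and the file list are made up for illustration, and `validate=False` skips the asynchronous validator.

# Hypothetical usage sketch: `store`, the accession number, and the file
# list are assumptions for illustration, not part of the example above.
ref = commit_files(store,
                   'ds000001',
                   ['dataset_description.json', 'README'],
                   name='Jane Doe',
                   email='jane@example.com',
                   validate=False)
print('Created commit', ref)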
Example #2
def create_dataset(store, dataset, name=None, email=None):
    """Create a DataLad git-annex repo for a new dataset."""
    ds = store.get_dataset(dataset)
    with CommitInfo(None, name, email, where='global'):
        ds.create()
        ds.no_annex(BIDS_NO_ANNEX)
        if not ds.repo:
            raise Exception('Repo creation failed.')
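Note that CommitInfo receives None here instead of a dataset, along with where='global': the repository does not exist until ds.create() runs, so the author identity presumably has to be set in the global git config rather than on the repository itself.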
Example #3
File: files.py Project: kousu/openneuro
def remove_recursive(store,
                     dataset,
                     path,
                     name=None,
                     email=None,
                     cookies=None):
    """Remove a path within a dataset recursively."""
    ds = store.get_dataset(dataset)
    with CommitInfo(ds, name, email):
        ds.remove(path, recursive=True, check=False)
        update_head(store, dataset, cookies=cookies)
Example #4
def _finish_upload(self, dataset_id, upload, name, email, cookies):
    try:
        ds = self.store.get_dataset(dataset_id)
        with CommitInfo(ds, name, email):
            upload_path = self.store.get_upload_path(dataset_id, upload)
            # Collect uploaded paths that already exist in the dataset as
            # annexed symlinks, relative to the upload root
            unlock_files = [
                os.path.relpath(filename, start=upload_path)
                for filename in pathlib.Path(upload_path).glob('**/*')
                if os.path.islink(
                    os.path.join(
                        ds.path,
                        os.path.relpath(filename, start=upload_path)))
            ]
            gevent.sleep()  # yield to other greenlets during the long task
            move_files(upload_path, ds.path)
            ds.save(unlock_files)
            update_head(ds, dataset_id, cookies)
            gevent.sleep()
            shutil.rmtree(upload_path)
    except Exception:
        self.logger.exception('Dataset upload could not be finalized')
        sentry_sdk.capture_exception()
Example #5
def _finish_upload(self, dataset_id, upload, name, email, cookies):
    try:
        ds = self.store.get_dataset(dataset_id)
        with CommitInfo(ds, name, email):
            upload_path = self.store.get_upload_path(dataset_id, upload)
            # Collect uploaded paths that already exist in the dataset as
            # annexed symlinks, relative to the upload root
            unlock_files = [
                os.path.relpath(filename, start=upload_path)
                for filename in glob.iglob(os.path.join(upload_path, '**'),
                                           recursive=True)
                if os.path.islink(
                    os.path.join(
                        ds.path,
                        os.path.relpath(filename, start=upload_path)))
            ]
            ds.unlock(unlock_files)
            # dirs_exist_ok requires Python 3.8+
            shutil.copytree(upload_path, ds.path, dirs_exist_ok=True)
            shutil.rmtree(upload_path)
            ds.save(unlock_files)
            update_head(ds, dataset_id, cookies)
    except Exception:
        self.logger.exception('Dataset upload could not be finalized')
        sentry_sdk.capture_exception()
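Examples #4 and #5 appear to be two revisions of the same method. The first walks the upload with pathlib.Path.glob and moves files into the dataset before saving; the second enumerates paths with glob.iglob, explicitly unlocks the annexed files first, and copies the upload tree with shutil.copytree(dirs_exist_ok=True) (Python 3.8+) before saving and updating HEAD.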
Example #6
File: files.py Project: kousu/openneuro
def remove_files(store, dataset, files, name=None, email=None, cookies=None):
    """Remove a list of files in a dataset and update the HEAD reference."""
    ds = store.get_dataset(dataset)
    with CommitInfo(ds, name, email):
        for filename in files:
            ds.remove(filename, check=False)
            update_head(store, dataset, cookies)
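Every example above wraps its write operations in CommitInfo. The call sites suggest a context manager that temporarily sets the git author identity, per repository, or globally when where='global' is passed. The real class ships with the project; the sketch below is only an illustration of that assumed behavior, built on plain git config calls with made-up fallback values.

import subprocess

class CommitInfoSketch:
    """Hypothetical stand-in for CommitInfo: temporarily set the git
    user.name/user.email used for commits made inside the block."""

    def __init__(self, ds, name=None, email=None, where='local'):
        # `ds` may be None when where='global' (see Example #2)
        self.cwd = ds.path if ds is not None else None
        self.name = name or 'Git Worker'          # made-up fallback identity
        self.email = email or 'git@example.com'   # made-up fallback identity
        self.scope = ['--global'] if where == 'global' else []

    def _config(self, *args):
        subprocess.run(['git', 'config'] + self.scope + list(args),
                       cwd=self.cwd, check=True)

    def __enter__(self):
        self._config('user.name', self.name)
        self._config('user.email', self.email)
        return self

    def __exit__(self, exc_type, exc, tb):
        # Drop the temporary identity again; a real implementation would
        # restore whatever values were configured before entering
        self._config('--unset', 'user.name')
        self._config('--unset', 'user.email')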