Exemplo n.º 1
0
def create_app(annex_path):
    """Build and return the Falcon API for the datalad service.

    :param annex_path: filesystem path of the annex root backing DataladStore
    :return: configured falcon.API instance with all routes registered
    """
    # If running under gunicorn, use that logger
    gunicorn_logger = logging.getLogger('gunicorn.error')
    logging.basicConfig(handlers=gunicorn_logger.handlers,
                        level=gunicorn_logger.level)
    # Error reporting is opt-in via configuration
    if datalad_service.config.SENTRY_DSN:
        sentry_sdk.init(
            dsn=datalad_service.config.SENTRY_DSN,
            integrations=[FalconIntegration()]
        )

    # Only attach the Elastic APM middleware when a server URL is configured;
    # constructing it unconditionally breaks deployments without APM.
    middleware = [AuthenticateMiddleware()]
    if datalad_service.config.ELASTIC_APM_SERVER_URL:
        middleware.append(ElasticApmMiddleware(
            service_name='datalad-service',
            server_url=datalad_service.config.ELASTIC_APM_SERVER_URL))

    api = falcon.API(middleware=middleware)
    # Allow {name:path} templates to match segments containing slashes
    api.router_options.converters['path'] = PathConverter

    # All resources share one store rooted at the annex path
    store = DataladStore(annex_path)

    heartbeat = HeartbeatResource()
    datasets = DatasetResource(store)
    dataset_draft = DraftResource(store)
    dataset_description = DescriptionResource(store)
    dataset_files = FilesResource(store)
    dataset_publish = PublishResource(store)
    dataset_snapshots = SnapshotResource(store)
    dataset_upload = UploadResource(store)
    dataset_upload_file = UploadFileResource(store)

    api.add_route('/heartbeat', heartbeat)

    api.add_route('/datasets', datasets)
    api.add_route('/datasets/{dataset}', datasets)

    api.add_route('/datasets/{dataset}/draft', dataset_draft)
    api.add_route('/datasets/{dataset}/description', dataset_description)

    api.add_route('/datasets/{dataset}/files', dataset_files)
    api.add_route('/datasets/{dataset}/files/{filename:path}', dataset_files)

    api.add_route('/datasets/{dataset}/snapshots', dataset_snapshots)
    api.add_route(
        '/datasets/{dataset}/snapshots/{snapshot}', dataset_snapshots)
    api.add_route(
        '/datasets/{dataset}/snapshots/{snapshot}/files', dataset_files)
    api.add_route(
        '/datasets/{dataset}/snapshots/{snapshot}/files/{filename:path}', dataset_files)

    api.add_route(
        '/datasets/{dataset}/publish', dataset_publish
    )

    api.add_route(
        '/datasets/{dataset}/upload/{upload}', dataset_upload
    )
    api.add_route(
        '/uploads/{worker}/{dataset}/{upload}/{filename:path}', dataset_upload_file
    )

    return api
Exemplo n.º 2
0
def create_app(annex_path):
    """Build and return the Falcon API for the datalad service.

    Also registers a periodic Celery task that audits datasets.

    :param annex_path: filesystem path of the annex root backing DataladStore
    :return: configured falcon.API instance with all routes registered
    """
    raven.setup()

    @app.on_after_configure.connect
    def schedule_celery_tasks(sender, **kwargs):
        """Run all periodic tasks."""
        # Audit every 15 minutes on the publish queue
        sender.add_periodic_task(
            60 * 15, audit_datasets.s(annex_path), queue=publish_queue())

    # NOTE: dropped the unused `application` alias that was bound here
    api = falcon.API(middleware=AuthenticateMiddleware())
    # Allow {name:path} templates to match segments containing slashes
    api.router_options.converters['path'] = PathConverter

    raven.falcon_handler(api)

    # All resources share one store rooted at the annex path
    store = DataladStore(annex_path)

    heartbeat = HeartbeatResource()
    datasets = DatasetResource(store)
    dataset_draft = DraftResource(store)
    dataset_description = DescriptionResource(store)
    dataset_files = FilesResource(store)
    dataset_objects = ObjectsResource(store)
    dataset_publish = PublishResource(store)
    dataset_snapshots = SnapshotResource(store)

    api.add_route('/heartbeat', heartbeat)

    api.add_route('/datasets', datasets)
    api.add_route('/datasets/{dataset}', datasets)

    api.add_route('/datasets/{dataset}/draft', dataset_draft)
    api.add_route('/datasets/{dataset}/description', dataset_description)

    api.add_route('/datasets/{dataset}/files', dataset_files)
    api.add_route('/datasets/{dataset}/files/{filename:path}', dataset_files)

    api.add_route('/datasets/{dataset}/objects', dataset_objects)
    api.add_route(
        '/datasets/{dataset}/objects/{filekey:path}', dataset_objects)

    api.add_route('/datasets/{dataset}/snapshots', dataset_snapshots)
    api.add_route(
        '/datasets/{dataset}/snapshots/{snapshot}', dataset_snapshots)
    api.add_route(
        '/datasets/{dataset}/snapshots/{snapshot}/files', dataset_files)
    api.add_route(
        '/datasets/{dataset}/snapshots/{snapshot}/files/{filename:path}', dataset_files)

    api.add_route(
        '/datasets/{dataset}/publish', dataset_publish
    )

    return api
Exemplo n.º 3
0
def create_app(annex_path):
    """Construct the Falcon application serving the datalad HTTP API.

    :param annex_path: root directory of the annex used by DataladStore
    :return: a falcon.API with authentication middleware and all routes
    """
    raven.setup()

    api = application = falcon.API(middleware=AuthenticateMiddleware())
    # Register the path converter so {name:path} segments may contain slashes
    api.router_options.converters['path'] = PathConverter

    raven.falcon_handler(api)

    # Every resource is backed by the same store instance
    store = DataladStore(annex_path)

    heartbeat_handler = HeartbeatResource()
    dataset_handler = DatasetResource(store)
    draft_handler = DraftResource(store)
    files_handler = FilesResource(store)
    objects_handler = ObjectsResource(store)
    publish_handler = PublishResource(store)
    snapshot_handler = SnapshotResource(store)

    # URI template -> responder, registered in declaration order
    route_table = (
        ('/heartbeat', heartbeat_handler),
        ('/datasets', dataset_handler),
        ('/datasets/{dataset}', dataset_handler),
        ('/datasets/{dataset}/draft', draft_handler),
        ('/datasets/{dataset}/files', files_handler),
        ('/datasets/{dataset}/files/{filename:path}', files_handler),
        ('/datasets/{dataset}/objects', objects_handler),
        ('/datasets/{dataset}/objects/{filekey:path}', objects_handler),
        ('/datasets/{dataset}/snapshots', snapshot_handler),
        ('/datasets/{dataset}/snapshots/{snapshot}', snapshot_handler),
        ('/datasets/{dataset}/snapshots/{snapshot}/files', files_handler),
        ('/datasets/{dataset}/snapshots/{snapshot}/files/{filename:path}',
         files_handler),
        ('/datasets/{dataset}/publish', publish_handler),
    )
    for uri_template, handler in route_table:
        api.add_route(uri_template, handler)

    return api
Exemplo n.º 4
0
def datalad_store(tmpdir_factory):
    """Pytest fixture: create a temp annex holding one saved, tagged dataset.

    Builds a dataset under DATASET_ID, writes dataset_description.json and
    CHANGES, saves each, tags the result as SNAPSHOT_ID, and returns a
    DataladStore rooted at the temp directory.
    """
    path = tmpdir_factory.mktemp('annexes')
    ds_path = str(path.join(DATASET_ID))
    # Create an empty dataset for testing
    ds = Dataset(ds_path)
    ds.create()
    # NOTE(review): presumably excludes the BIDS_NO_ANNEX paths from
    # git-annex so they live in plain git — confirm against datalad docs
    ds.no_annex(BIDS_NO_ANNEX)

    json_path = os.path.join(ds_path, 'dataset_description.json')
    with open(json_path, 'w') as f:
        json.dump(DATASET_DESCRIPTION, f, ensure_ascii=False)
    ds.save(json_path)

    changes_path = os.path.join(ds_path, 'CHANGES')
    with open(changes_path, 'w') as f:
        # NOTE(review): CHANGES is serialized as JSON here; if it is meant
        # to be a plain-text changelog, f.write(CHANGES) would be expected
        # instead — TODO confirm
        json.dump(CHANGES, f, ensure_ascii=False)
    ds.save(changes_path)

    # Commit everything and tag the state so tests can reference SNAPSHOT_ID
    ds.save(version_tag=SNAPSHOT_ID)
    # Setup a seed for any new_dataset uses
    random.seed(42)
    # Note: `path` is a py.path.local, not a str — DataladStore apparently
    # accepts either here
    return DataladStore(path)
Exemplo n.º 5
0
 def dataset_task_decorator(*args, **kwargs):
     """Swap the leading annex path argument for a DataladStore instance.

     The first positional argument is the annex path; the wrapped `func`
     receives a store built from it, followed by the remaining arguments.
     """
     store = DataladStore(args[0])
     return func(store, *args[1:], **kwargs)
Exemplo n.º 6
0
def create_app(annex_path):
    """Construct the Falcon application serving the datalad HTTP API.

    Wires logging to gunicorn's logger when present, enables Sentry and
    Elastic APM only when configured, and registers every resource route.

    :param annex_path: root directory of the annex used by DataladStore
    :return: a fully routed falcon.API instance
    """
    # Reuse gunicorn's handlers/level when running under gunicorn
    gunicorn_logger = logging.getLogger('gunicorn.error')
    logging.basicConfig(handlers=gunicorn_logger.handlers,
                        level=gunicorn_logger.level)

    # Sentry error reporting is opt-in via configuration
    if datalad_service.config.SENTRY_DSN:
        sentry_sdk.init(
            dsn=datalad_service.config.SENTRY_DSN,
            integrations=[FalconIntegration()],
        )

    # APM middleware is only attached when a server URL is configured
    middleware = [AuthenticateMiddleware()]
    if datalad_service.config.ELASTIC_APM_SERVER_URL:
        middleware.append(ElasticApmMiddleware(
            service_name='datalad-service',
            server_url=datalad_service.config.ELASTIC_APM_SERVER_URL))

    api = falcon.API(middleware=middleware)
    # Register the path converter so {name:path} segments may contain slashes
    api.router_options.converters['path'] = PathConverter

    # Every resource is backed by the same store instance
    store = DataladStore(annex_path)

    heartbeat_handler = HeartbeatResource()
    dataset_handler = DatasetResource(store)
    draft_handler = DraftResource(store)
    validation_handler = ValidationResource(store)
    history_handler = HistoryResource(store)
    description_handler = DescriptionResource(store)
    files_handler = FilesResource(store)
    annex_objects_handler = AnnexObjectsResource(store)
    publish_handler = PublishResource(store)
    snapshot_handler = SnapshotResource(store)
    upload_handler = UploadResource(store)
    upload_file_handler = UploadFileResource(store)
    git_refs_handler = GitRefsResource(store)
    git_receive_handler = GitReceiveResource(store)
    git_upload_handler = GitUploadResource(store)
    git_annex_handler = GitAnnexResource(store)
    reexporter_handler = ReexporterResource(store)
    reset_handler = ResetResource(store)
    remote_import_handler = RemoteImportResource(store)

    # URI template -> responder, registered in declaration order
    route_table = (
        ('/heartbeat', heartbeat_handler),
        ('/datasets', dataset_handler),
        ('/datasets/{dataset}', dataset_handler),
        ('/datasets/{dataset}/draft', draft_handler),
        ('/datasets/{dataset}/history', history_handler),
        ('/datasets/{dataset}/description', description_handler),
        ('/datasets/{dataset}/validate/{hexsha}', validation_handler),
        ('/datasets/{dataset}/reset/{hexsha}', reset_handler),
        ('/datasets/{dataset}/files', files_handler),
        ('/datasets/{dataset}/files/{filename:path}', files_handler),
        ('/datasets/{dataset}/snapshots', snapshot_handler),
        ('/datasets/{dataset}/snapshots/{snapshot}', snapshot_handler),
        ('/datasets/{dataset}/snapshots/{snapshot}/files', files_handler),
        ('/datasets/{dataset}/snapshots/{snapshot}/files/{filename:path}',
         files_handler),
        ('/datasets/{dataset}/snapshots/{snapshot}/annex-key/{annex_key}',
         annex_objects_handler),
        ('/datasets/{dataset}/publish', publish_handler),
        ('/datasets/{dataset}/reexport-remotes', reexporter_handler),
        ('/datasets/{dataset}/upload/{upload}', upload_handler),
        ('/uploads/{worker}/{dataset}/{upload}/{filename:path}',
         upload_file_handler),
        ('/git/{worker}/{dataset}/info/refs', git_refs_handler),
        ('/git/{worker}/{dataset}/git-receive-pack', git_receive_handler),
        ('/git/{worker}/{dataset}/git-upload-pack', git_upload_handler),
        ('/git/{worker}/{dataset}/annex/{key}', git_annex_handler),
        ('/datasets/{dataset}/import/{import_id}', remote_import_handler),
    )
    for uri_template, handler in route_table:
        api.add_route(uri_template, handler)

    return api