Example #1
def publish_snapshot(store, dataset, snapshot, cookies=None, realm=None):
    """Publish a snapshot tag to S3, GitHub or both."""
    ds = store.get_dataset(dataset)
    siblings = ds.siblings()

    # If the realm parameter is not included, find the best target
    if realm is None:
        # If the dataset has a public sibling, use it as the export target;
        # otherwise, use the private realm as the export target
        public_bucket_name = DatasetRealm(DatasetRealm.PUBLIC).s3_remote
        has_public_bucket = get_sibling_by_name(public_bucket_name, siblings)
        if has_public_bucket:
            realm = DatasetRealm(DatasetRealm.PUBLIC)
        else:
            realm = DatasetRealm(DatasetRealm.PRIVATE)
    else:
        realm = get_s3_realm(realm=realm)

    # Create the sibling if it does not exist
    s3_sibling(ds, siblings)

    # Export to S3 in a background worker (GitHub export follows below)
    publish_s3_async.s(
        store.annex_path, dataset, snapshot,
        realm.s3_remote, realm.s3_bucket, cookies
    ).apply_async(queue=publish_queue())

    # Public publishes to GitHub
    if realm == DatasetRealm.PUBLIC and DATALAD_GITHUB_EXPORTS_ENABLED:
        # Create the GitHub sibling only if GitHub exports are enabled
        github_sibling(ds, dataset, siblings)
        publish_github_async.s(
            store.annex_path, dataset, snapshot, realm.github_remote
        ).apply_async(queue=publish_queue())
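
All three examples depend on DatasetRealm exposing per-realm remote names (s3_remote, s3_bucket, github_remote) and on DatasetRealm(DatasetRealm.PUBLIC) resolving back to the PUBLIC member. The project's real definition is not reproduced here; the following minimal stand-in, with placeholder remote and bucket names, only mirrors the attributes the examples actually use.

from enum import Enum


class DatasetRealm(Enum):
    # Minimal stand-in: the remote and bucket names below are placeholders,
    # not the project's actual configuration.
    PRIVATE = 1
    PUBLIC = 2

    @property
    def s3_remote(self):
        return 'public-s3' if self is DatasetRealm.PUBLIC else 'private-s3'

    @property
    def s3_bucket(self):
        if self is DatasetRealm.PUBLIC:
            return 'example-public-bucket'
        return 'example-private-bucket'

    @property
    def github_remote(self):
        return 'github'


# Passing an existing member back to the Enum class returns that member,
# which is why DatasetRealm(DatasetRealm.PUBLIC) works in the examples.
assert DatasetRealm(DatasetRealm.PUBLIC) is DatasetRealm.PUBLIC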
Example #2
def publish_snapshot(store, dataset, snapshot, cookies=None, realm=None):
    """Publish a snapshot tag to S3, GitHub or both."""
    dataset_id = dataset
    ds = store.get_dataset(dataset)
    siblings = ds.siblings()

    # If the realm parameter is not included, find the best target
    if realm is None:
        # If the dataset has a public sibling, use it as the export target;
        # otherwise, use the private realm as the export target
        public_bucket_name = DatasetRealm(DatasetRealm.PUBLIC).s3_remote
        has_public_bucket = get_sibling_by_name(public_bucket_name, siblings)
        if has_public_bucket:
            realm = DatasetRealm(DatasetRealm.PUBLIC)
        else:
            realm = DatasetRealm(DatasetRealm.PRIVATE)
    else:
        realm = get_s3_realm(realm=realm)

    # Create the S3 sibling if it does not exist, then publish the snapshot to it
    s3_remote = s3_sibling(ds, siblings)
    publish_target(ds, realm.s3_remote, snapshot)

    # Collect the S3 object versions for the published snapshot
    versions = s3_versions(ds, realm, snapshot)
    if versions:
        # Update the API with the published file versions for this snapshot
        r = requests.post(url=GRAPHQL_ENDPOINT,
                          json=file_urls_mutation(dataset_id, snapshot,
                                                  versions),
                          cookies=cookies)
        if r.status_code != 200:
            raise Exception(r.text)
    # Public publishes to GitHub
    if realm == DatasetRealm.PUBLIC and DATALAD_GITHUB_EXPORTS_ENABLED:
        github_remote = github_sibling(ds, dataset_id, siblings)
        publish_target(ds, realm.github_remote, snapshot)
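
Example #2 additionally reports the published S3 object versions back to the API by POSTing the result of file_urls_mutation to GRAPHQL_ENDPOINT with the caller's session cookies. The real mutation is not shown in these examples; the sketch below only assumes the conventional GraphQL-over-HTTP envelope (a dict with query and variables keys), and the mutation name, argument types, and field names are hypothetical placeholders.

# Hypothetical sketch of the payload builder used in Example #2; only the
# {query, variables} envelope is standard GraphQL, everything else is assumed.
def file_urls_mutation(dataset_id, snapshot, file_versions):
    query = '''
        mutation ($datasetId: ID!, $tag: String!, $files: [FileInput!]!) {
            updateSnapshotFiles(datasetId: $datasetId, tag: $tag, files: $files) {
                id
            }
        }
    '''
    return {
        'query': query,
        'variables': {
            'datasetId': dataset_id,
            'tag': snapshot,
            'files': file_versions,
        },
    }

requests.post(url=GRAPHQL_ENDPOINT, json=..., cookies=cookies) then serializes this dict as the JSON request body, and the status-code check in Example #2 surfaces any failed request as an exception.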
Example #3
def get_dataset_realm(ds, siblings, realm=None):
    # If the realm parameter is not included, find the best target
    if realm is None:
        # If the dataset has a public sibling, use it as the export target;
        # otherwise, use the private realm as the export target
        public_bucket_name = DatasetRealm(DatasetRealm.PUBLIC).s3_remote
        has_public_bucket = get_sibling_by_name(public_bucket_name, siblings)
        if has_public_bucket:
            realm = DatasetRealm(DatasetRealm.PUBLIC)
        else:
            realm = DatasetRealm(DatasetRealm.PRIVATE)
    else:
        realm = get_s3_realm(realm=realm)
    return realm
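
Example #3 is the realm-selection branch from Examples #1 and #2 factored into a reusable helper. As a quick check of its fallback behaviour, the sketch below assumes the DatasetRealm stand-in shown after Example #1 and get_dataset_realm from Example #3 live in the same module; the two stubs and the sibling dictionaries are placeholders, not the project's real helpers or fixtures.

# Placeholder stubs; the real get_sibling_by_name and get_s3_realm belong
# to the project and are not reproduced in these examples.
def get_sibling_by_name(name, siblings):
    return any(sibling.get('name') == name for sibling in siblings)


def get_s3_realm(realm=None):
    # Accept either a DatasetRealm member or a member name such as 'PRIVATE'.
    return realm if isinstance(realm, DatasetRealm) else DatasetRealm[realm]


# With a sibling matching the public S3 remote the helper prefers PUBLIC;
# otherwise it falls back to PRIVATE, and an explicit realm wins outright.
assert get_dataset_realm(None, [{'name': 'public-s3'}]) is DatasetRealm.PUBLIC
assert get_dataset_realm(None, [{'name': 'origin'}]) is DatasetRealm.PRIVATE
assert get_dataset_realm(None, [], realm='PRIVATE') is DatasetRealm.PRIVATE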