def publish_target(dataset, target, treeish):
    """Publish target of dataset.

    This exists so the actual publish can be easily mocked.

    :param dataset: Path to the dataset to publish.
    :param target: Remote name — ``'github'`` or an S3 remote.
    :param treeish: Git treeish (tag or commit) to export.
    """
    if target == 'github':
        # Fix: pass the treeish being published. The original passed
        # `target`, which in this branch is always the literal string
        # 'github' — github_export expects a treeish as its second
        # argument (see the tag.name call site in export_dataset).
        return github_export(dataset, treeish)
    else:
        return s3_export(dataset, target, treeish)
def export_dataset(dataset_path, cookies=None, s3_export=s3_export,
                   github_export=github_export,
                   github_enabled=DATALAD_GITHUB_EXPORTS_ENABLED):
    """Export dataset to S3 and GitHub.

    If the dataset has not been configured with public remotes, this is
    a noop.

    :param dataset_path: Filesystem path of the dataset repository.
    :param cookies: Optional cookies forwarded to the cache-clear call.
    :param s3_export: S3 export callable — injectable so tests can mock it.
    :param github_export: GitHub export callable — injectable for mocking.
    :param github_enabled: Whether GitHub exports are enabled at all.
    """
    if is_git_annex_remote(dataset_path, get_s3_remote()):
        dataset = os.path.basename(dataset_path)
        repo = pygit2.Repository(dataset_path)
        tags = git_tag(repo)
        # Iterate over all tags and push those
        for tag in tags:
            s3_export(dataset_path, get_s3_remote(), tag.name)
            # Export each tag to GitHub right after its S3 export. The
            # call uses the loop variable `tag`, so it must live inside
            # the loop: running it after the loop would raise NameError
            # when there are no tags and would only ever publish the
            # final tag.
            if github_enabled:
                # Perform all GitHub export steps
                github_export(dataset_path, tag.name)
        # Drop cache once all exports are complete
        clear_dataset_cache(dataset, cookies)