def main(cmdlineargs=None):  # PYTHON_ARGCOMPLETE_OK
    parser = setup_parser()
    try:
        import argcomplete
        argcomplete.autocomplete(parser)
    except ImportError:
        pass
    # parse cmd args
    cmdlineargs = parser.parse_args(cmdlineargs)
    if cmdlineargs.change_path is not None:
        for path in cmdlineargs.change_path:
            chpwd(path)

    # run the function associated with the selected command
    if cmdlineargs.common_debug:
        # So we could see/stop clearly at the point of failure
        setup_exceptionhook()
        cmdlineargs.func(cmdlineargs)
    else:
        # Otherwise - guard and only log the summary. Postmortem is not
        # as convenient if being caught in this ultimate except
        try:
            cmdlineargs.func(cmdlineargs)
        except Exception as exc:
            lgr.error('%s (%s)' % (exc_str(exc), exc.__class__.__name__))
            sys.exit(1)
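# The debug-vs-guard dispatch above recurs in the entry points further down:
# in debug mode an exception hook is installed so failures stop at the point
# of origin; otherwise the command is wrapped in a catch-all that logs a
# one-line summary and exits non-zero. Below is a minimal self-contained
# sketch of that pattern; run_guarded and its logger are hypothetical
# stand-ins, not datalad API:
import logging
import sys

sketch_lgr = logging.getLogger('sketch')

def run_guarded(func, args, debug=False):
    # hypothetical stand-in for the dispatch logic in main() above
    if debug:
        # let the exception propagate, so a post-mortem hook/debugger
        # sees the original point of failure
        return func(args)
    try:
        return func(args)
    except Exception as exc:
        # log only a one-line summary instead of a full traceback
        sketch_lgr.error('%s (%s)', exc, exc.__class__.__name__)
        sys.exit(1)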
def get_native_metadata(ds, guess_type=False, ds_identifier=None):
    """Parse a dataset to gather its native metadata

    Returns
    -------
    List
        Each item in the list is a metadata dictionary (JSON-LD compliant).
        The first item corresponds to the annex-based metadata of the
        dataset. The last item contains the native metadata of the dataset
        content. Any additional items correspond to subdataset metadata sets.
    """
    if ds_identifier is None:
        ds_identifier = ds.id
    # using a list, because we could get multiple sets of meta data per
    # dataset, and we want to quickly collect them without having to do
    # potentially complex graph merges
    meta = []
    # get native metadata
    nativetypes = get_metadata_type(ds, guess=guess_type)
    if not nativetypes:
        return meta

    # keep local, who knows what some parsers might pull in
    from . import parsers
    for nativetype in nativetypes:
        if nativetype == 'aggregate':
            # this is special and needs to be ignored here, even if it was
            # configured. reason: this parser runs anyway in get_metadata()
            continue
        pmod = import_module('.{}'.format(nativetype),
                             package=parsers.__package__)
        try:
            native_meta = pmod.MetadataParser(ds).get_metadata(ds_identifier)
        except Exception as e:
            lgr.error('failed to get native metadata ({}): {}'.format(
                nativetype, exc_str(e)))
            continue
        if native_meta:
            # TODO here we could apply a "patch" to the native metadata,
            # if desired

            # try hard to keep things a simple non-nested list
            if isinstance(native_meta, list):
                meta.extend(native_meta)
            else:
                meta.append(native_meta)

    return meta
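# A possible call site for get_native_metadata(), assuming a datalad Dataset
# with a configured (or guessable) native metadata type; the path is made up:
from datalad.api import Dataset

ds = Dataset('/tmp/example-dataset')  # hypothetical dataset location
for md in get_native_metadata(ds, guess_type=True):
    # each item is a JSON-LD compliant dictionary
    print(md.get('@context'), md.get('name'))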
def main(args=None):
    parser = setup_parser()
    # parse cmd args
    args = parser.parse_args(args)

    # run the function associated with the selected command
    if args.common_debug:
        # So we could see/stop clearly at the point of failure
        setup_exceptionhook()
        args.func(args)
    else:
        # Otherwise - guard and only log the summary. Postmortem is not
        # as convenient if being caught in this ultimate except
        try:
            args.func(args)
        except Exception as exc:
            lgr.error('%s (%s)' % (str(exc), exc.__class__.__name__))
            sys.exit(1)
def main(args=None):  # PYTHON_ARGCOMPLETE_OK
    parser = setup_parser()
    try:
        import argcomplete
        argcomplete.autocomplete(parser)
    except ImportError:
        pass
    # parse cmd args
    cmdlineargs = parser.parse_args(args)
    if cmdlineargs.change_path is not None:
        for path in cmdlineargs.change_path:
            chpwd(path)

    ret = None
    if cmdlineargs.pbs_runner:
        from .helpers import run_via_pbs
        from .helpers import strip_arg_from_argv
        from .common_args import pbs_runner as pbs_runner_opt
        args_ = strip_arg_from_argv(args or sys.argv,
                                    cmdlineargs.pbs_runner,
                                    pbs_runner_opt[1])
        # run the function associated with the selected command
        run_via_pbs(args_, cmdlineargs.pbs_runner)
    elif cmdlineargs.common_debug:
        # So we could see/stop clearly at the point of failure
        setup_exceptionhook()
        ret = cmdlineargs.func(cmdlineargs)
    else:
        # Otherwise - guard and only log the summary. Postmortem is not
        # as convenient if being caught in this ultimate except
        try:
            ret = cmdlineargs.func(cmdlineargs)
        except InsufficientArgumentsError as exc:
            # if the func reports inappropriate usage, give help output
            lgr.error('%s (%s)' % (exc_str(exc), exc.__class__.__name__))
            cmdlineargs.subparser.print_usage()
            sys.exit(1)
        except Exception as exc:
            lgr.error('%s (%s)' % (exc_str(exc), exc.__class__.__name__))
            sys.exit(1)
    if hasattr(cmdlineargs, 'result_renderer'):
        cmdlineargs.result_renderer(ret)
def run(args):
    import glob
    import os
    import os.path
    from datalad.api import Dataset
    from datalad.log import lgr

    lgr.debug("Command line arguments: %r" % args)

    # Since GitPython doesn't recognize that we are within a repo, if we are
    # deeper down the tree, walk upwards and look for '.git':
    # TODO: Maybe provide a patch for GitPython to have it cleaner.
    cwd_before = cwd = os.getcwd()
    while True:
        if os.path.exists(os.path.join(cwd, '.git')):
            break
        else:
            if cwd == '/':  # TODO: Is this platform-dependent?
                lgr.error("No repository found.")
                raise ValueError  # TODO: Proper exception or clean exit?
            else:
                os.chdir(os.pardir)
                cwd = os.getcwd()

    ds = Dataset(cwd)
    os.chdir(cwd_before)

    # args.path comes as a list.
    # Expansions (like globs) provided by the shell itself are already done.
    # But: we don't know exactly what shells we are running on and what they
    # may or may not provide. Therefore, perform any expansion we want to
    # guarantee, per item of the list:
    expanded_list = []
    for item in args.path:
        expanded_list.extend(glob.glob(item))
        # TODO: regexp + maybe ext. glob zsh-style
        # TODO: what about spaces in filenames and similar things?
        # TODO: os.path.expandvars, os.path.expanduser? Not needed here,
        #       is it? Always?

    ds.get(expanded_list)
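# A minimal invocation sketch for run(), assuming argparse has collected one
# or more path patterns into a Namespace (the glob pattern here is made up):
from argparse import Namespace

# normally argparse builds this Namespace from the command line,
# e.g. `datalad get sub-*/anat/*.nii.gz`
run(Namespace(path=['sub-*/anat/*.nii.gz']))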
def main(args=None, cls=None, remote_name=None, description=None):
    import sys

    parser = setup_parser(remote_name, description)
    # parse cmd args
    args = parser.parse_args(args)

    # stdin/stdout will be used for interactions with annex
    ui.set_backend('annex')

    if args.common_debug:
        # So we could see/stop clearly at the point of failure
        setup_exceptionhook()
        _main(args, cls)
    else:
        # Otherwise - guard and only log the summary. Postmortem is not
        # as convenient if being caught in this ultimate except
        try:
            _main(args, cls)
        except Exception as exc:
            lgr.error('%s (%s)', str(exc), exc.__class__.__name__)
            sys.exit(1)
def main(args=None, backend=None):
    import sys

    # TODO: redirect lgr output to stderr if it is stdout and not "forced"
    # by env variable...
    #
    parser = setup_parser()
    # parse cmd args
    args = parser.parse_args(args)

    if args.common_debug:
        # So we could see/stop clearly at the point of failure
        setup_exceptionhook()
        _main(args, backend)
    else:
        # Otherwise - guard and only log the summary. Postmortem is not
        # as convenient if being caught in this ultimate except
        try:
            _main(args, backend)
        except Exception as exc:
            lgr.error('%s (%s)' % (str(exc), exc.__class__.__name__))
            sys.exit(1)
def __call__(self, url, name=None):
    # TODO: After publishing new demo collection, adapt doctest
    """
    Examples
    --------
    >>> from datalad.api import register_collection, list_collections
    >>> def test_register_collection_simple():
    ...     assert("DATALAD_COL_demo_collection" not in [c.name for c in list_collections()])
    ...     col = register_collection("http://collections.datalad.org/demo/DATALAD_COL_demo_collection")
    ...     assert(col.name in [c.name for c in list_collections()])
    ...     assert(col.url == "http://collections.datalad.org/demo/DATALAD_COL_demo_collection/.git")
    ...     assert(col.name == "DATALAD_COL_demo_collection")

    Returns
    -------
    Collection
    """
    local_master = get_datalad_master()

    # check whether url is a local path:
    if isdir(abspath(expandvars(expanduser(url)))):
        url = abspath(expandvars(expanduser(url)))
        # raise exception, if it's not a valid collection:
        repo = CollectionRepo(url, create=False)
        if name is None:
            name = repo.name
    else:
        # Try to auto-complete collection's url:
        url += '/' if not url.endswith('/') else ''
        url_completions = [url,
                           url + '.git',
                           url + url.rstrip('/').split('/')[-1] +
                           '.datalad-collection/.git']
        url_ok = False
        for address in url_completions:
            try:
                # use ls-remote to verify git can talk to that repository:
                local_master.git_ls_remote(address, "-h")
                url = address
                url_ok = True
                break
            except CommandError as e:
                if re.match("fatal.+?%s.+?not found" % url, e.stderr):
                    continue
                else:
                    lgr.error("Registering collection failed.\n%s" % e)
                    return

        if not url_ok:
            lgr.error("Registering collection failed. "
                      "Couldn't find remote repository.")
            return

        if name is None:
            # derive name from url:
            parts = url.rstrip('/').split('/')
            if parts[-1] == '.git':
                name = parts[-2]
            elif parts[-1].endswith('.git'):
                name = parts[-1][0:-4]
            elif parts[-1].endswith('.datalad-collection'):
                name = parts[-1][0:-19]
            else:
                name = parts[-1]

    local_master.git_remote_add(name, url)
    local_master.git_fetch(name)

    return Collection(CollectionRepoBackend(local_master, name + "/master"))
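# The URL auto-completion above probes a fixed candidate list with
# `git ls-remote`. A standalone sketch of just the candidate generation
# (candidate_urls is a hypothetical helper mirroring the list built above):
def candidate_urls(url):
    # mirror the completion list constructed in __call__ above
    url += '/' if not url.endswith('/') else ''
    base = url.rstrip('/').split('/')[-1]
    return [url,
            url + '.git',
            url + base + '.datalad-collection/.git']

# e.g. candidate_urls("http://example.com/demo") ->
# ['http://example.com/demo/',
#  'http://example.com/demo/.git',
#  'http://example.com/demo/demo.datalad-collection/.git']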