Exemplo n.º 1
0
def get_version(default=None):
    if default is None:
        default = '0.0.1'
    print ''
    print fill('You can publish multiple versions of your app, and the ' + BOLD() + 'version' + ENDC() + ' of your app is a string with which to tag a particular version.  We encourage the use of Semantic Versioning for labeling your apps (see http://semver.org/ for more details).')
    version = prompt_for_var('Version', default)
    return version
Exemplo n.º 2
0
def get_strings(app_json, file_input_names, file_array_input_names, file_output_names, dummy_output_hash):
    """Build the C++ source-template snippets for a generated app.

    :param app_json: parsed dxapp.json dict for the app being generated
    :param file_input_names: names of file-class input parameters
    :param file_array_input_names: names of array:file-class input parameters
    :param file_output_names: names of file-class output parameters
    :param dummy_output_hash: unused in this C++ variant -- presumably kept
        for signature parity with other language generators; TODO confirm
    :returns: tuple (input_sig_str, init_inputs_str, dl_files_str,
        ul_files_str, outputs_str); the first element is always '' here.
    """
    init_inputs_str = ''
    dl_files_str = ''
    ul_files_str = ''
    outputs_str = ''
    inputs = []
    if 'inputSpec' in app_json and len(app_json['inputSpec']) > 0:
        init_inputs_str = '\n  '
        for input_param in app_json['inputSpec']:
            # Skip optional or defaulted inputs: only required inputs get an
            # initialization line in the template.
            if ("optional" in input_param and input_param['optional']) or "default" in input_param:
                continue
            inputs.append(get_input_fmt(input_param))
        init_inputs_str += "\n  ".join(inputs)
        init_inputs_str += "\n"

    if len(file_input_names) > 0 or len(file_array_input_names) > 0:
        # Emit the explanatory C++ comment block followed by one download
        # statement (or loop, for array inputs) per file input.
        dl_files_str = "\n" + fill('''The following line(s) use the C++ bindings to download your file inputs to the local file system using variable names for the filenames.  To recover the original filenames, you can use the output of "variable.describe()["name"].get<string>()".''', initial_indent="  // ", subsequent_indent="  // ") + "\n\n"
        if len(file_input_names) > 0:
            dl_files_str += "\n  ".join(['  DXFile::downloadDXFile({name}.getID(), "{name}");'.format(name=fname) for fname in file_input_names]) + "\n"
        if len(file_array_input_names) > 0:
            dl_files_str += "\n  ".join(['  for (int i = 0; i < {name}.size(); i++) {{\n    DXFile::downloadDXFile({name}[i].getID(), "{name}-" + {name}[i].getID());\n  }}'.format(name=fname) for fname in file_array_input_names]) + "\n"

    if len(file_output_names) > 0:
        # Upload stanza: one uploadLocalFile call per file-class output.
        ul_files_str = "\n" + fill('''The following line(s) use the C++ bindings to upload your file outputs after you have created them on the local file system.  It assumes that you have used the output field name for the filename for each output, but you can change that behavior to suit your needs.''', initial_indent="  // ", subsequent_indent="  // ")
        ul_files_str +='\n\n  '
        ul_files_str += "\n  ".join(['DXFile {name} = DXFile::uploadLocalFile("{name}");'.format(name=name) for name in file_output_names]) + '\n'

    if "outputSpec" in app_json and len(app_json['outputSpec']) > 0:
        # One output[...] assignment per declared output parameter.
        outputs_str = "  " + "\n  ".join(["output[\"" + param["name"] + "\"] = " + get_output_fmt(param) + ";" for param in app_json["outputSpec"]]) + '\n'
    return '', init_inputs_str, dl_files_str, ul_files_str, outputs_str
Exemplo n.º 3
0
def get_io_field(io_hash, defaults=None, delim='=', highlight_fields=None):
    """Render an input/output hash (plus default values) as one display field.

    :param io_hash: mapping of field name -> value, or None
    :param defaults: mapping of field name -> default value; each entry is
        rendered in square brackets
    :param delim: separator shown between each key and its value
    :param highlight_fields: keys whose values should be colorized
    :returns: the formatted (possibly multi-line) string, or '-' when there
        is nothing to show
    """
    # Use None sentinels: the original mutable default arguments ({} / [])
    # are created once and shared across every call, so any caller that
    # mutates them would silently change later calls' behavior.
    if defaults is None:
        defaults = {}
    if highlight_fields is None:
        highlight_fields = []

    if io_hash is None:
        return '-'
    if len(io_hash) == 0 and len(defaults) == 0:
        return '-'

    def highlight_value(key, value):
        # Wrap the value in color escapes when the key was asked to stand out.
        if key in highlight_fields:
            return YELLOW() + value + ENDC()
        else:
            return value

    if get_delimiter() is not None:
        # Delimited (machine-readable) mode: plain key<delim>value entries,
        # defaults wrapped in brackets.
        return ('\n' + get_delimiter()).join(
            [(key + delim + highlight_value(key, io_val_to_str(value)))
             for key, value in io_hash.items()] +
            [('[' + key + delim + io_val_to_str(value) + ']')
             for key, value in defaults.items()])
    else:
        # Human-readable mode: wrap each entry with a 16/17-space hanging
        # indent, then strip the first 16 spaces so the very first line can
        # sit after the caller-supplied label.
        return ('\n').join([
            fill(key + ' ' + delim + ' ' +
                 highlight_value(key, io_val_to_str(value)),
                 initial_indent=' ' * 16,
                 subsequent_indent=' ' * 17,
                 break_long_words=False) for key, value in io_hash.items()
        ] + [
            fill('[' + key + ' ' + delim + ' ' + io_val_to_str(value) + ']',
                 initial_indent=' ' * 16,
                 subsequent_indent=' ' * 17,
                 break_long_words=False) for key, value in defaults.items()
        ])[16:]
Exemplo n.º 4
0
def get_version(default=None):
    if default is None:
        default = '0.0.1'
    print ''
    print fill('You can publish multiple versions of your app, and the ' + BOLD() + 'version' + ENDC() + ' of your app is a string with which to tag a particular version.  We encourage the use of Semantic Versioning for labeling your apps (see http://semver.org/ for more details).')
    version = prompt_for_var('Version', default)
    return version
Exemplo n.º 5
0
def get_language():
    #language_choices = language_options.keys()
    language_choices = ["Python", "C++", "bash"]
    use_completer(Completer(language_choices))
    print ''
    print fill('You can write your app in any ' + BOLD() + 'programming language' + ENDC() + ', but we provide templates for the following supported languages' + ENDC() + ": " + ', '.join(language_choices))
    language = prompt_for_var('Programming language', 'Python', choices=language_choices)
    use_completer()
    return language
Exemplo n.º 6
0
def get_language():
    #language_choices = language_options.keys()
    language_choices = ["Python", "C++", "bash"]
    use_completer(Completer(language_choices))
    print ''
    print fill('You can write your app in any ' + BOLD() + 'programming language' + ENDC() + ', but we provide templates for the following supported languages' + ENDC() + ": " + ', '.join(language_choices))
    language = prompt_for_var('Programming language', 'Python', choices=language_choices)
    use_completer()
    return language
Exemplo n.º 7
0
def get_strings(app_json, file_input_names, file_array_input_names, file_output_names, dummy_output_hash):
    """Build the Python source-template snippets for a generated app.

    :param app_json: parsed dxapp.json dict for the app being generated
    :param file_input_names: names of file-class input parameters
    :param file_array_input_names: names of array:file-class input parameters
    :param file_output_names: names of file-class output parameters
    :param dummy_output_hash: unused in this Python variant -- presumably
        kept for signature parity with other language generators; TODO confirm
    :returns: tuple (input_sig_str, init_inputs_str, dl_files_str,
        ul_files_str, outputs_str)
    """
    input_sig_str = ''
    init_inputs_str = ''
    dl_files_str = ''
    ul_files_str = ''
    outputs_str = ''
    inputs = []
    init_inputs = []
    if "inputSpec" in app_json:
        # First, add all non-keyword args
        for input_param in app_json["inputSpec"]:
            if ("optional" in input_param and input_param['optional']) or "default" in input_param:
                continue
            inputs.append(input_param["name"])
            # Data-object inputs get wrapped in their dxpy handler class
            # (single values and array: classes handled separately).
            if input_param["class"] in class_to_dxclass:
                init_inputs.append("{name} = {dxclass}({name})".format(name=input_param["name"],
                                                                       dxclass=class_to_dxclass[input_param["class"]]))
            elif input_param["class"].startswith("array:") and input_param["class"][6:] in class_to_dxclass:
                init_inputs.append("{name} = [{dxclass}(item) for item in {name}]".format(name=input_param["name"],
                                                                                          dxclass=class_to_dxclass[input_param["class"][6:]]))

        # Then, add keyword args
        for input_param in app_json["inputSpec"]:
            if ("optional" not in input_param or not input_param['optional']) and "default" not in input_param:
                continue
            if "default" in input_param:
                inputs.append("{name}={default}".format(name=input_param["name"], default=(input_param["default"] if input_param['class'] != 'string' else '"' + input_param['default'] + '"')))
            else:
                inputs.append("{name}=None".format(name=input_param["name"]))
        input_sig_str = ", ".join(inputs)
    else:
        # No input spec at all: the generated main() accepts anything.
        input_sig_str = "**kwargs"

    if len(init_inputs) > 0:
        init_inputs_str = '\n' + fill('The following line(s) initialize your data object inputs on the platform into dxpy.DXDataObject instances that you can start using immediately.', initial_indent='    # ', subsequent_indent='    # ', width=80)
        init_inputs_str += "\n\n    "
        init_inputs_str += "\n    ".join(init_inputs)
        init_inputs_str += "\n"

    if len(file_input_names) > 0 or len(file_array_input_names) > 0:
        # One download call per file input; a for-loop per array:file input.
        dl_files_str = '\n' + fill('The following line(s) download your file inputs to the local file system using variable names for the filenames.', initial_indent='    # ', subsequent_indent='    # ', width=80) + '\n\n'
        if len(file_input_names) > 0:
            dl_files_str += "\n".join(['    dxpy.download_dxfile(' + name + '.get_id(), "' + name + '")' for name in file_input_names]) + "\n"
        if len(file_array_input_names) > 0:
            dl_files_str += "\n".join(['    for i in range(len({name})):\n        dxpy.download_dxfile({name}[i].get_id(), "{name}-" + str(i))'.format(name=name) for name in file_array_input_names]) + "\n"

    if len(file_output_names) > 0:
        # Upload stanza: one upload_local_file call per file-class output.
        ul_files_str = "\n" + fill('''The following line(s) use the Python bindings to upload your file outputs after you have created them on the local file system.  It assumes that you have used the output field name for the filename for each output, but you can change that behavior to suit your needs.''', initial_indent="    # ", subsequent_indent="    # ", width=80)
        ul_files_str +='\n\n    '
        ul_files_str += "\n    ".join(['{name} = dxpy.upload_local_file("{name}")'.format(name=name) for name in file_output_names]) + '\n'

    if 'outputSpec' in app_json and len(app_json['outputSpec']) > 0:
        # One output[...] assignment per declared output parameter.
        outputs_str = "    " + "\n    ".join(['output["{name}"] = {value}'.format(name=param["name"], value=get_output_fmt(param)) for param in app_json['outputSpec']]) + '\n'

    return input_sig_str, init_inputs_str, dl_files_str, ul_files_str, outputs_str
Exemplo n.º 8
0
def print_intro(api_version):
    """Print the app wizard banner and the 'Basic Metadata' section header.

    :param api_version: API version string appended to the banner line
    """
    print DNANEXUS_LOGO() + ' App Wizard, API v' + api_version
    print ''

    print BOLD() + 'Basic Metadata' + ENDC()
    print ''
    print fill('''Please enter basic metadata fields that will be used to
describe your app.  Optional fields are denoted by options with square
brackets.  At the end of this wizard, the files necessary for building your
app will be generated from the answers you provide.''')
    print ''
Exemplo n.º 9
0
def print_intro(api_version):
    """Show the wizard's introductory banner and metadata instructions.

    :param api_version: API version string shown in the banner
    """
    print DNANEXUS_LOGO() + ' App Wizard, API v' + api_version
    print ''

    print BOLD() + 'Basic Metadata' + ENDC()
    print ''
    print fill('''Please enter basic metadata fields that will be used to
describe your app.  Optional fields are denoted by options with square
brackets.  At the end of this wizard, the files necessary for building your
app will be generated from the answers you provide.''')
    print ''
Exemplo n.º 10
0
def cp(args):
    """Clone the objects/folders in args.sources into args.destination.

    The destination must resolve to a folder in a different project or
    container than each source.  Exits via parser.exit() on user errors and
    via err_exit() on API failures.
    """
    dest_proj, dest_path, _none = try_call(resolve_path,
                                           args.destination, 'folder')
    if dest_path is None:
        parser.exit(1, 'Cannot copy to a hash ID\n')
    dx_dest = dxpy.get_handler(dest_proj)
    try:
        # check if the destination exists
        dx_dest.list_folder(folder=dest_path, only='folders')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # not swallowed.  Any API failure here is treated as "destination
        # folder does not exist" (deliberate best-effort probe).
        cp_to_noexistent_destination(args, dest_path, dx_dest, dest_proj)
        return

    # The destination exists, we need to copy all of the sources to it.
    if len(args.sources) == 0:
        parser.exit(1, 'No sources provided to copy to another project\n')
    src_objects = []
    src_folders = []
    for source in args.sources:
        src_proj, src_folderpath, src_results = try_call(resolve_existing_path,
                                                         source,
                                                         allow_mult=True, all_mult=args.all)
        if src_proj == dest_proj:
            if is_hashid(source):
                # This is the only case in which the source project is
                # purely assumed, so give a better error message.
                parser.exit(1, fill('Error: You must specify a source project for ' + source) + '\n')
            else:
                parser.exit(1, fill('Error: A source path and the destination path resolved ' +
                                    'to the same project or container. Please specify ' +
                                    'different source and destination containers, e.g.') +
                            '\n  dx cp source-project:source-id-or-path dest-project:dest-path' + '\n')

        if src_proj is None:
            parser.exit(1, fill('Error: A source project must be specified or a current ' +
                                'project set in order to clone objects between projects') + '\n')

        # A path with no matched objects is copied as a folder; otherwise
        # collect the matched object IDs.
        if src_results is None:
            src_folders.append(src_folderpath)
        else:
            src_objects += [result['id'] for result in src_results]
    try:
        exists = dxpy.DXHTTPRequest('/' + src_proj + '/clone',
                                    {"objects": src_objects,
                                     "folders": src_folders,
                                     "project": dest_proj,
                                     "destination": dest_path})['exists']
        if len(exists) > 0:
            print(fill('The following objects already existed in the destination container ' +
                       'and were left alone:') + '\n ' + '\n '.join(exists))
    except Exception:
        # Narrowed from a bare `except:`; err_exit() reports the API error.
        err_exit()
Exemplo n.º 11
0
def main(**kwargs):
    """Export a GTable to CSV/TSV, writing to stdout or a file.

    Parses CLI args (or the given kwargs), resolves args.path to a data
    object, and streams its rows through a csv.writer, optionally restricted
    by a genomic-range (--gri) query.
    """
    if len(kwargs) == 0:
        args = parser.parse_args(sys.argv[1:])
    else:
        args = parser.parse_args(kwargs)

    # Attempt to resolve name
    try:
        project, folderpath, entity_result = resolve_existing_path(args.path, expected='entity')
    except ResolutionError as details:
        parser.exit(1, fill(unicode(details)) + '\n')

    if entity_result is None:
        parser.exit(1, fill('Could not resolve ' + args.path + ' to a data object') + '\n')

    filename = args.output
    if filename is None:
        # Escape path separators so the object name is a usable filename.
        filename = entity_result['describe']['name'].replace('/', '%2F')

    dxtable = dxpy.get_handler(entity_result['id'])

    delimiter = ',' if args.csv else '\t'
    if args.output == '-':
        writer = csv.writer(sys.stdout, delimiter=delimiter)
    else:
        if args.output is None and not args.no_ext:
            filename += '.csv' if args.csv else '.tsv'
        if not args.overwrite and os.path.exists(filename):
            parser.exit(1, fill('Error: path \"' + filename + '\" already exists but -f/--overwrite was not set') + '\n')
        # NOTE(review): the file handle is left open for the process
        # lifetime; 'wb' is the Python 2 csv-module convention.
        writer = csv.writer(open(filename, 'wb'),
                            delimiter=delimiter)
    if not args.no_header:
        writer.writerow((['__id__:int'] if args.rowid else []) + [(col['name'] + ':' + col['type']) for col in dxtable.describe()['columns']])

    # Query stuff
    if args.gri is not None:
        try:
            lo = int(args.gri[1])
            hi = int(args.gri[2])
        except ValueError:
            # Narrowed from a bare `except:`: only malformed integers are
            # expected here; anything else should propagate.
            parser.exit(1, fill('Error: the LO and HI arguments to --gri must be integers') + '\n')
        gri_query = dxpy.DXGTable.genomic_range_query(args.gri[0],
                                                      lo,
                                                      hi,
                                                      args.gri_mode,
                                                      args.gri_name)
        iterator = dxtable.iterate_query_rows(query=gri_query, limit=args.limit)
    else:
        iterator = dxtable.iterate_rows(start=args.starting, end=(None if args.limit is None else args.starting + args.limit))
    # Column 0 is the row ID; drop it unless --rowid was requested.
    for row in iterator:
        writer.writerow([unicode(item).encode('utf-8') for item in row[0 if args.rowid else 1:]])
Exemplo n.º 12
0
def cp(args):
    """Clone the given sources into args.destination (a different project).

    Raises DXCLIError for user errors; delegates to
    cp_to_noexistent_destination() when the destination folder is missing,
    and calls err_exit() on clone-API failure.
    """
    dest_proj, dest_path, _none = try_call(resolve_path, args.destination, expected='folder')
    if dest_path is None:
        raise DXCLIError('Cannot copy to a hash ID')
    dx_dest = dxpy.get_handler(dest_proj)
    try:
        # check if the destination exists
        dx_dest.list_folder(folder=dest_path, only='folders')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate; any API failure is treated as "destination missing"
        # (deliberate best-effort probe).
        cp_to_noexistent_destination(args, dest_path, dx_dest, dest_proj)
        return

    # The destination exists, we need to copy all of the sources to it.
    if len(args.sources) == 0:
        raise DXCLIError('No sources provided to copy to another project')
    src_objects = []
    src_folders = []
    for source in args.sources:
        src_proj, src_folderpath, src_results = try_call(resolve_existing_path,
                                                         source,
                                                         allow_mult=True, all_mult=args.all)
        if src_proj == dest_proj:
            if is_hashid(source):
                # This is the only case in which the source project is
                # purely assumed, so give a better error message.
                raise DXCLIError(fill('Error: You must specify a source project for ' + source))
            else:
                raise DXCLIError(fill('Error: A source path and the destination path resolved ' +
                                    'to the same project or container. Please specify ' +
                                    'different source and destination containers, e.g.') +
                                 '\n  dx cp source-project:source-id-or-path dest-project:dest-path')

        if src_proj is None:
            raise DXCLIError(fill('Error: A source project must be specified or a current ' +
                                  'project set in order to clone objects between projects'))

        # A path with no matched objects is copied as a folder; otherwise
        # collect the matched object IDs.
        if src_results is None:
            src_folders.append(src_folderpath)
        else:
            src_objects += [result['id'] for result in src_results]
    try:
        exists = dxpy.DXHTTPRequest('/' + src_proj + '/clone',
                                    {"objects": src_objects,
                                     "folders": src_folders,
                                     "project": dest_proj,
                                     "destination": dest_path})['exists']
        if len(exists) > 0:
            print(fill('The following objects already existed in the destination container ' +
                       'and were left alone:') + '\n ' + '\n '.join(exists))
    except Exception:
        # Narrowed from a bare `except:`; err_exit() reports the API error.
        err_exit()
Exemplo n.º 13
0
def main(**kwargs):
    """Export a GTable's rows as CSV/TSV to stdout or a file.

    Resolves args.path to a data object, writes an optional header, then
    streams rows (optionally filtered by a --gri genomic-range query).
    """
    if len(kwargs) == 0:
        args = parser.parse_args(sys.argv[1:])
    else:
        args = parser.parse_args(kwargs)

    # Attempt to resolve name
    try:
        project, folderpath, entity_result = resolve_existing_path(args.path, expected='entity')
    except ResolutionError as details:
        parser.exit(1, fill(unicode(details)) + '\n')

    if entity_result is None:
        parser.exit(1, fill('Could not resolve ' + args.path + ' to a data object') + '\n')

    filename = args.output
    if filename is None:
        # Escape path separators so the object name is a usable filename.
        filename = entity_result['describe']['name'].replace('/', '%2F')

    dxtable = dxpy.get_handler(entity_result['id'])

    delimiter = ',' if args.csv else '\t'
    if args.output == '-':
        writer = csv.writer(sys.stdout, delimiter=delimiter)
    else:
        if args.output is None and not args.no_ext:
            filename += '.csv' if args.csv else '.tsv'
        if not args.overwrite and os.path.exists(filename):
            parser.exit(1, fill('Error: path \"' + filename + '\" already exists but -f/--overwrite was not set') + '\n')
        # NOTE(review): the file handle is left open for the process
        # lifetime; 'wb' is the Python 2 csv-module convention.
        writer = csv.writer(open(filename, 'wb'),
                            delimiter=delimiter)
    if not args.no_header:
        writer.writerow((['__id__:int'] if args.rowid else []) + [(col['name'] + ':' + col['type']) for col in dxtable.describe()['columns']])

    # Query stuff
    if args.gri is not None:
        try:
            lo = int(args.gri[1])
            hi = int(args.gri[2])
        except ValueError:
            # Narrowed from a bare `except:`: only malformed integers are
            # expected here; anything else should propagate.
            parser.exit(1, fill('Error: the LO and HI arguments to --gri must be integers') + '\n')
        gri_query = dxpy.DXGTable.genomic_range_query(args.gri[0],
                                                      lo,
                                                      hi,
                                                      args.gri_mode,
                                                      args.gri_name)
        iterator = dxtable.iterate_query_rows(query=gri_query, limit=args.limit)
    else:
        iterator = dxtable.iterate_rows(start=args.starting, end=(None if args.limit is None else args.starting + args.limit))
    # Column 0 is the row ID; drop it unless --rowid was requested.
    for row in iterator:
        writer.writerow([unicode(item).encode('utf-8') for item in row[0 if args.rowid else 1:]])
Exemplo n.º 14
0
def get_pattern(template_dir):
    pattern_choices = []
    print ''
    print fill('The following common ' + BOLD() + 'execution patterns' + ENDC() + ' are currently available for your programming language:')

    pattern_choices.append('basic')
    print ' ' + BOLD() + 'basic' + ENDC()
    print fill('Your app will run on a single machine from beginning to end.', initial_indent='   ', subsequent_indent='   ')

    if os.path.isdir(os.path.join(template_dir, 'parallelized')):
        pattern_choices.append('parallelized')
        print ' ' + BOLD() + 'parallelized' + ENDC()
        print fill('Your app will subdivide a large chunk of work into multiple pieces that can be processed in parallel and independently of each other, followed by a final stage that will merge and process the results as necessary.', initial_indent='   ', subsequent_indent='   ')

    if os.path.isdir(os.path.join(template_dir, 'scatter-process-gather')):
        pattern_choices.append('scatter-process-gather')
        print ' ' + BOLD() + 'scatter-process-gather' + ENDC()
        print fill('Similar to ' + BOLD() + 'parallelized' + ENDC() + ' but with the addition of a "scatter" entry point.  This allows you to break out the execution for splitting up the input, or you can call a separate app/applet to perform the splitting.',
                   initial_indent='   ',
                   subsequent_indent='   ')

    if len(pattern_choices) == 1:
        print 'Automatically using the execution pattern "basic"'
        return 'basic'

    use_completer(Completer(pattern_choices))
    pattern = prompt_for_var('Execution pattern', 'basic', choices=pattern_choices)
    use_completer()
    return pattern
Exemplo n.º 15
0
def get_strings(app_json, file_input_names, file_array_input_names,
                file_output_names, dummy_output_hash):
    """Build the C++ source-template snippets for a generated app.

    :param app_json: parsed dxapp.json dict for the app being generated
    :param file_input_names: names of file-class input parameters
    :param file_array_input_names: names of array:file-class input parameters
    :param file_output_names: names of file-class output parameters
    :param dummy_output_hash: unused here -- presumably kept for signature
        parity with other language generators; TODO confirm
    :returns: tuple (input_sig_str, init_inputs_str, dl_files_str,
        ul_files_str, outputs_str); the first element is always '' here.
    """
    init_inputs_str = ''
    dl_files_str = ''
    ul_files_str = ''
    outputs_str = ''
    inputs = []
    if 'inputSpec' in app_json and len(app_json['inputSpec']) > 0:
        init_inputs_str = '\n  '
        for input_param in app_json['inputSpec']:
            # Only required inputs (neither optional nor defaulted) get an
            # initialization line.
            if ("optional" in input_param
                    and input_param['optional']) or "default" in input_param:
                continue
            inputs.append(get_input_fmt(input_param))
        init_inputs_str += "\n  ".join(inputs)
        init_inputs_str += "\n"

    if len(file_input_names) > 0 or len(file_array_input_names) > 0:
        # Explanatory C++ comment followed by one download statement (or
        # loop, for array inputs) per file input.
        dl_files_str = "\n" + fill(
            '''The following line(s) use the C++ bindings to download your file inputs to the local file system using variable names for the filenames.  To recover the original filenames, you can use the output of "variable.describe()["name"].get<string>()".''',
            initial_indent="  // ",
            subsequent_indent="  // ") + "\n\n"
        if len(file_input_names) > 0:
            dl_files_str += "\n  ".join([
                '  DXFile::downloadDXFile({name}.getID(), "{name}");'.format(
                    name=fname) for fname in file_input_names
            ]) + "\n"
        if len(file_array_input_names) > 0:
            dl_files_str += "\n  ".join([
                '  for (int i = 0; i < {name}.size(); i++) {{\n    DXFile::downloadDXFile({name}[i].getID(), "{name}-" + {name}[i].getID());\n  }}'
                .format(name=fname) for fname in file_array_input_names
            ]) + "\n"

    if len(file_output_names) > 0:
        # Upload stanza: one uploadLocalFile call per file-class output.
        ul_files_str = "\n" + fill(
            '''The following line(s) use the C++ bindings to upload your file outputs after you have created them on the local file system.  It assumes that you have used the output field name for the filename for each output, but you can change that behavior to suit your needs.''',
            initial_indent="  // ",
            subsequent_indent="  // ")
        ul_files_str += '\n\n  '
        ul_files_str += "\n  ".join([
            'DXFile {name} = DXFile::uploadLocalFile("{name}");'.format(
                name=name) for name in file_output_names
        ]) + '\n'

    if "outputSpec" in app_json and len(app_json['outputSpec']) > 0:
        # One output[...] assignment per declared output parameter.
        outputs_str = "  " + "\n  ".join([
            "output[\"" + param["name"] + "\"] = " + get_output_fmt(param) +
            ";" for param in app_json["outputSpec"]
        ]) + '\n'
    return '', init_inputs_str, dl_files_str, ul_files_str, outputs_str
Exemplo n.º 16
0
def get_pattern(template_dir):
    pattern_choices = []
    print ''
    print fill('The following common ' + BOLD() + 'execution patterns' + ENDC() + ' are currently available for your programming language:')

    pattern_choices.append('basic')
    print ' ' + BOLD() + 'basic' + ENDC()
    print fill('Your app will run on a single machine from beginning to end.', initial_indent='   ', subsequent_indent='   ')

    if os.path.isdir(os.path.join(template_dir, 'parallelized')):
        pattern_choices.append('parallelized')
        print ' ' + BOLD() + 'parallelized' + ENDC()
        print fill('Your app will subdivide a large chunk of work into multiple pieces that can be processed in parallel and independently of each other, followed by a final stage that will merge and process the results as necessary.', initial_indent='   ', subsequent_indent='   ')

    if os.path.isdir(os.path.join(template_dir, 'scatter-process-gather')):
        pattern_choices.append('scatter-process-gather')
        print ' ' + BOLD() + 'scatter-process-gather' + ENDC()
        print fill('Similar to ' + BOLD() + 'parallelized' + ENDC() + ' but with the addition of a "scatter" entry point.  This allows you to break out the execution for splitting up the input, or you can call a separate app/applet to perform the splitting.',
                   initial_indent='   ',
                   subsequent_indent='   ')

    if len(pattern_choices) == 1:
        print 'Automatically using the execution pattern "basic"'
        return 'basic'

    use_completer(Completer(pattern_choices))
    pattern = prompt_for_var('Execution pattern', 'basic', choices=pattern_choices)
    use_completer()
    return pattern
Exemplo n.º 17
0
def remove_membership(args):
    """Remove a user from an org, reporting the projects/apps affected.

    Requires interactive confirmation when args.confirm is set; raises
    DXCLIError when the user is not currently a member of the org.
    """
    user_id = get_user_id(args.username_or_user_id)

    try:
        dxpy.api.org_find_members(args.org_id, {"id": [user_id]})["results"][0]
    except IndexError:
        raise DXCLIError("Cannot remove a user who is not a member of the org")

    confirmed = True
    if args.confirm:
        # Warn the operator and ask before doing anything destructive.
        print(
            fill(
                "WARNING: About to remove {u} from {o}; project permissions will{rpp} be removed and app permissions will{rap} be removed"
                .format(u=user_id,
                        o=args.org_id,
                        rpp="" if args.revoke_project_permissions else " not",
                        rap="" if args.revoke_app_permissions else " not")))
        confirmed = prompt_for_yn("Please confirm")

    if not confirmed:
        print(
            fill("Aborting removal of {u} from {o}".format(u=user_id,
                                                           o=args.org_id)))
        return

    result = dxpy.api.org_remove_member(args.org_id,
                                        _get_org_remove_member_args(args))
    if args.brief:
        print(result["id"])
        return

    print(fill("Removed {u} from {o}".format(u=user_id, o=args.org_id)))
    # Report the affected projects, then apps, in the same tabulated form.
    for section in ("projects", "apps"):
        print(fill("Removed {u} from the following {s}:".format(u=user_id,
                                                                s=section)))
        removed_ids = list(result[section].keys())
        if removed_ids:
            for removed_id in removed_ids:
                print("\t{i}".format(i=removed_id))
        else:
            print("\tNone")
Exemplo n.º 18
0
def remove_membership(args):
    """Remove user-<username> from an org, listing the projects/apps touched.

    Asks for interactive confirmation when args.confirm is set.
    """
    # Will throw ResourceNotFound if the specified user is not currently a
    # member of the org.
    dxpy.api.org_get_member_access(args.org_id,
                                   {"user": "******" + args.username})

    confirmed = True
    if args.confirm:
        # Warn and ask before performing the removal.
        print(
            fill(
                "WARNING: About to remove user-{u} from {o}; project permissions will{rpp} be removed and app permissions will{rap} be removed"
                .format(u=args.username,
                        o=args.org_id,
                        rpp="" if args.revoke_project_permissions else " not",
                        rap="" if args.revoke_app_permissions else " not")))
        confirmed = prompt_for_yn("Please confirm")

    if not confirmed:
        print(
            fill("Aborting removal of user-{u} from {o}".format(
                u=args.username, o=args.org_id)))
        return

    result = dxpy.api.org_remove_member(args.org_id,
                                        _get_org_remove_member_args(args))
    if args.brief:
        print(result["id"])
        return

    print(fill("Removed user-{u} from {o}".format(u=args.username,
                                                  o=args.org_id)))
    # Report affected projects first, then apps, in identical form.
    for section in ("projects", "apps"):
        print(fill("Removed user-{u} from the following {s}:".format(
            u=args.username, s=section)))
        entity_ids = list(result[section].keys())
        if entity_ids:
            for entity_id in entity_ids:
                print("\t{e}".format(e=entity_id))
        else:
            print("\tNone")
Exemplo n.º 19
0
def get_io_spec(spec, skip_fields=None):
    """Format an input/output spec for display, grouping parameters.

    :param spec: list of parameter dicts (each with at least a "name",
        optionally a "group")
    :param skip_fields: parameter names to omit (e.g. advanced inputs)
    :returns: formatted string, or '-' when nothing remains to show
    """
    skip = skip_fields if skip_fields is not None else []
    visible = [param for param in spec if param["name"] not in skip]

    # Bucket parameters by their (optional) group label.
    grouped = defaultdict(list)
    for param in visible:
        grouped[param.get('group')].append(param)

    # Ungrouped parameters come first, then each named group with its
    # members indented beneath the group label.
    lines = [get_io_desc(param) for param in grouped.get(None, [])]
    for group_name in grouped:
        if group_name is None:
            continue
        lines.append("{g}:".format(g=group_name))
        lines.extend("    " + get_io_desc(param) for param in grouped[group_name])

    if len(skip) > 0:
        lines.append("<advanced inputs hidden; use --verbose to see more>")

    if not lines:
        return '-'
    if get_delimiter() is not None:
        return ('\n' + get_delimiter()).join(lines)
    return ('\n' + ' ' * 16).join([
        fill(entry, subsequent_indent=' ' * 18, width_adjustment=-18)
        for entry in lines
    ])
Exemplo n.º 20
0
def update_org(args):
    """Update an org from the parsed CLI args and report the result.

    Prints only the org ID with --brief, otherwise a confirmation line.
    """
    result = try_call(dxpy.api.org_update, args.org_id, _get_org_update_args(args))
    if args.brief:
        print(result["id"])
    else:
        print(fill("Updated {o}".format(o=result["id"])))
Exemplo n.º 21
0
def download_one_file(project, file_desc, dest_filename, args):
    """Download a single closed file object to a local path.

    Skips (with a message on stderr) anything that is not a closed file;
    exits with an error if the destination exists and -f/--overwrite is unset.
    """
    if not args.overwrite and os.path.exists(dest_filename):
        err_exit(
            fill('Error: path "' + dest_filename +
                 '" already exists but -f/--overwrite was not set'))

    # Only closed, file-class objects are downloadable.
    if file_desc['class'] != 'file':
        print("Skipping non-file data object {name} ({id})".format(**file_desc),
              file=sys.stderr)
        return
    if file_desc['state'] != 'closed':
        print("Skipping file {name} ({id}) because it is not closed".format(**file_desc),
              file=sys.stderr)
        return

    # Some callers' args namespaces lack show_progress; default it off.
    show_progress = getattr(args, 'show_progress', False)

    try:
        dxpy.download_dxfile(file_desc['id'],
                             dest_filename,
                             show_progress=show_progress,
                             project=project)
    except:
        err_exit()
Exemplo n.º 22
0
def update_org(args):
    """Update org metadata via the API; print the org ID (or a summary)."""
    response = try_call(dxpy.api.org_update, args.org_id,
                        _get_org_update_args(args))
    message = (response["id"] if args.brief
               else fill("Updated {o}".format(o=response["id"])))
    print(message)
Exemplo n.º 23
0
def get_find_jobs_string(jobdesc, has_children, single_result=False, show_outputs=True):
    '''
    Format one job from "dx find jobs" for terminal display.

    :param jobdesc: hash of job describe output
    :param has_children: whether the job has subjobs to be printed
    :param single_result: whether the job is displayed as a single result or as part of a job tree
    :param show_outputs: whether to append the job's outputs (or failure info)
    :returns: formatted string (may include ANSI colors and tree-drawing chars)
    '''
    is_origin_job = jobdesc['parentJob'] is None or single_result
    # Origin jobs get a "* " bullet unless output is delimiter-separated.
    result = ("* " if is_origin_job and get_delimiter() is None else "")
    canonical_job_name = jobdesc['executableName'] + ":" + jobdesc['function']
    job_name = jobdesc.get('name', '<no name>')
    result += BOLD() + BLUE() + job_name + ENDC()
    # Show executable:function only when it differs from the display name.
    if job_name != canonical_job_name and job_name+":main" != canonical_job_name:
        result += ' (' + canonical_job_name + ')'
    result += DELIMITER(' (') + JOB_STATES(jobdesc['state']) + DELIMITER(') ') + jobdesc['id']
    # Continue the tree gutter under origin jobs that have children.
    result += DELIMITER('\n' + (u'│ ' if is_origin_job and has_children else ("  " if is_origin_job else "")))
    result += jobdesc['launchedBy'][5:] + DELIMITER(' ')  # strip "user-" prefix
    result += render_short_timestamp(jobdesc['created'])
    if jobdesc['state'] in ['done', 'failed', 'terminated', 'waiting_on_output']:
        # TODO: Remove this check once all jobs are migrated to have these values
        if 'stoppedRunning' in jobdesc and 'startedRunning' in jobdesc:
            # Timestamps are in milliseconds; convert the delta to seconds.
            result += " (runtime {r})".format(r=str(datetime.timedelta(seconds=int(jobdesc['stoppedRunning']-jobdesc['startedRunning'])/1000)))
    elif jobdesc['state'] == 'running':
        result += " (running for {rt})".format(rt=datetime.timedelta(seconds=int(time.time()-jobdesc['startedRunning']/1000)))

    if show_outputs:
        prefix = DELIMITER('\n' + (u'│ ' if is_origin_job and has_children else ("  " if is_origin_job else "")))
        # Fixed: compare against None with identity, not equality (PEP 8).
        if jobdesc.get("output") is not None:
            result += job_output_to_str(jobdesc['output'], prefix=prefix)
        elif jobdesc['state'] == 'failed' and 'failureReason' in jobdesc:
            result += prefix + BOLD() + jobdesc['failureReason'] + ENDC() + ": " + fill(jobdesc.get('failureMessage', ''),
                                                                                        subsequent_indent=prefix.lstrip('\n'))

    return result
Exemplo n.º 24
0
 def prompt_for_optional_inputs(self):
     """Interactively let the user pick and set optional input parameters.

     Repeatedly lists the optional inputs (with current or default values),
     reads a selection index, and prompts for a value for the chosen one.
     Returns when the user enters a blank line or sends EOF (^D).

     NOTE: Python 2 code (print statements, raw_input, unicode).
     """
     while True:
         print '\n' + fill('Select an optional parameter to set by its # (^D or <ENTER> to finish):') + '\n'
         for i in range(len(self.optional_inputs)):
             opt_str = ' [' + str(i) + '] ' + \
                 get_optional_input_str(self.input_spec[self.optional_inputs[i]])
             # Show the currently-set value in green, if one exists...
             if self.optional_inputs[i] in self.inputs:
                 opt_str += ' [=' + GREEN()
                 opt_str += json.dumps(self.inputs[self.optional_inputs[i]])
                 opt_str += ENDC() + ']'
             # ...otherwise show the spec's declared default, if any.
             elif 'default' in self.input_spec[self.optional_inputs[i]]:
                 opt_str += ' [default=' + json.dumps(self.input_spec[self.optional_inputs[i]]['default']) + ']'
             print opt_str
         print ""
         try:
             # Re-prompt until a valid index (or blank line to finish) is given.
             while True:
                 selected = raw_input('Optional param #: ')
                 if selected == '':
                     return
                 try:
                     opt_num = int(selected)
                     if opt_num < 0 or opt_num >= len(self.optional_inputs):
                         raise ValueError('Error: Selection is out of range')
                     break
                 except ValueError as details:
                     print unicode(details)
                     continue
         except EOFError:
             # ^D ends optional-input selection.
             return
         try:
             self.inputs[self.optional_inputs[opt_num]] = self.prompt_for_input(self.optional_inputs[opt_num])
         except:
             # Best-effort: a failed or aborted prompt leaves the parameter unset.
             pass
Exemplo n.º 25
0
def format_choices_or_suggestions(header, items, obj_class, initial_indent=' ' * 8, subsequent_indent=' ' * 10):
    """Format a "Choices:"/"Suggestions:" listing for an input parameter.

    Data-object suggestions are listed one per line as "[project:]object"
    references; all other values are shell-quoted, comma-separated, and wrapped.

    NOTE: Python 2 code (unicode, pipes.quote).

    :param header: leading label text (e.g. "Choices:")
    :param items: values; may be dxlinks, suggestion hashes, or plain values
    :param obj_class: parameter class (an "array:" prefix is stripped first)
    :param initial_indent: indent for the header line
    :param subsequent_indent: indent for each listed item / wrapped line
    :returns: formatted string
    """
    if obj_class.startswith('array:'):
        obj_class = obj_class[6:]

    def format_data_object_reference(item):
        # Render one suggestion as "[project:]object-id" or a path wildcard.
        if dxpy.is_dxlink(item):
            # Bare dxlink
            obj_id, proj_id = dxpy.get_dxlink_ids(item)
            return (proj_id + ":" if proj_id else '') + obj_id
        if dxpy.is_dxlink(item.get('value')):
            # value is set
            obj_id, proj_id = dxpy.get_dxlink_ids(item['value'])
            return (proj_id + ":" if proj_id else '') + obj_id + (' (%s)' % item['name'] if item.get('name') else '')
        if item.get('project') and item.get('path'):
            # project and folder path
            return item['project'] + ':' + item['path'] + "/" + obj_class + "-*" +  (' (%s)' % item['name'] if item.get('name') else '')
        return str(item)

    showing_data_objects = obj_class in dx_data_classes

    if showing_data_objects:
        return initial_indent + header + ''.join('\n' + subsequent_indent + format_data_object_reference(item) for item in items)
    else:
        # TODO: in interactive prompts the quotes here may be a bit
        # misleading. Perhaps it should be a separate mode to print
        # "interactive-ready" suggestions.
        return fill(header + ' ' + ', '.join([pipes.quote(unicode(item)) for item in items]),
                    initial_indent=initial_indent,
                    subsequent_indent=subsequent_indent)
Exemplo n.º 26
0
def _ensure_local_dir(d):
    """Create local directory ``d`` (including parents) if it is missing.

    Exits with an error message if ``d`` exists but is not a directory.
    """
    if os.path.isdir(d):
        return
    if os.path.exists(d):
        err_exit(
            fill('Error: path "' + d +
                 '" already exists and is not a directory'))
    os.makedirs(d)
Exemplo n.º 27
0
def get_io_spec(spec, skip_fields=None):
    """Format an input/output spec for display, grouping parameters.

    :param spec: list of parameter dicts, each with "name" and optional "group"
    :param skip_fields: names of parameters to hide from the listing
    :returns: formatted string, or '-' if there is nothing to display
    """
    if skip_fields is None:
        skip_fields = []

    # Group the visible parameters, keeping their original order.
    grouped = defaultdict(list)
    for p in spec:
        if p["name"] in skip_fields:
            continue
        grouped[p.get('group')].append(p)

    rendered = []
    # Ungrouped parameters are listed first.
    for p in grouped.get(None, []):
        rendered.append(get_io_desc(p))
    # Each named group is a header followed by its indented members.
    for grp, members in grouped.items():
        if grp is None:
            continue
        rendered.append("{g}:".format(g=grp))
        for p in members:
            rendered.append("    " + get_io_desc(p))

    if skip_fields:
        rendered.append("<advanced inputs hidden; use --verbose to see more>")

    if not rendered:
        return '-'
    delim = get_delimiter()
    if delim is not None:
        return ('\n' + delim).join(rendered)
    wrapped = [fill(entry, subsequent_indent=' '*18, width_adjustment=-18)
               for entry in rendered]
    return ('\n' + ' '*16).join(wrapped)
Exemplo n.º 28
0
def print_field(label, value):
    """Write "label value" to stdout, wrapping the value into a 16-column-aligned field."""
    delim = get_delimiter()
    if delim is None:
        padding = " " * (16 - len(label))
        wrapped = fill(value, subsequent_indent=' ' * 16, width_adjustment=-16)
        sys.stdout.write(label + padding + wrapped + '\n')
    else:
        sys.stdout.write(label + delim + value + '\n')
Exemplo n.º 29
0
def update_membership(args):
    """Update a user's membership level within an org and report the result."""
    # Throws ResourceNotFound if the specified user is not currently a
    # member of the org.
    dxpy.api.org_get_member_access(args.org_id, {"user": "******" + args.username})
    set_access_input = _get_org_set_member_access_args(args)
    result = dxpy.api.org_set_member_access(args.org_id, set_access_input)
    if not args.brief:
        print(fill("Updated membership of user-{u} in {o}".format(u=args.username, o=args.org_id)))
    else:
        print(result["id"])
Exemplo n.º 30
0
def job_output_to_str(job_output, prefix='\n', title="Output: ", title_len=None):
    """Render a job's output hash as an aligned, wrapped multi-line string.

    Returns "<prefix><title>-" when there are no outputs.
    """
    if not job_output:
        return prefix + title + "-"
    if title_len is None:
        title_len = len(title)
    entries = [fill(key + ' = ' + io_val_to_str(value),
                    subsequent_indent=' '*9,
                    break_long_words=False)
               for key, value in job_output.items()]
    # Indent continuation entries so they line up under the title.
    return prefix + title + (prefix + ' '*title_len).join(entries)
Exemplo n.º 31
0
def get_parallelized_io(file_input_names, gtable_input_names, gtable_output_names):
    """Interactively choose which fields use the parallelized GTable pattern.

    NOTE: Python 2 code (print statements).

    :param file_input_names: names of file-class input fields
    :param gtable_input_names: names of gtable-class input fields
    :param gtable_output_names: names of gtable-class output fields
    :returns: tuple (input_field, output_field); either may be '' if skipped
    """
    input_field = ''
    output_field = ''

    if len(file_input_names) > 0 or len(gtable_input_names) > 0:
        print ''
        print fill('Your app template can be initialized to split and process a ' + BOLD() + 'GTable' + ENDC() + ' input.  The following of your input fields are eligible for this template pattern:')
        print '  ' + '\n  '.join(gtable_input_names)
        # Enable tab completion over the eligible field names for this prompt.
        use_completer(Completer(gtable_input_names))
        input_field = prompt_for_var('Input field to process (press ENTER to skip)', '', choices=file_input_names + gtable_input_names)
        use_completer()

    # Only offer a parallel-build output if an input field was chosen.
    if input_field != '' and len(gtable_output_names) > 0:
        print ''
        print fill('Your app template can be initialized to build a ' + BOLD() + 'GTable' + ENDC() + ' in parallel for your output.  The following of your output fields are eligible for this template pattern:')
        print '  ' + '\n  '.join(gtable_output_names)
        use_completer(Completer(gtable_output_names))
        output_field = prompt_for_var('Output gtable to build in parallel (press ENTER to skip)', '', choices=gtable_output_names)
    return input_field, output_field
Exemplo n.º 32
0
def get_parallelized_io(required_file_input_names, gtable_input_names, gtable_output_names):
    """Interactively choose which fields use the parallelized GTable pattern.

    NOTE: Python 2 code (print statements).

    :param required_file_input_names: names of required file-class input fields
    :param gtable_input_names: names of gtable-class input fields
    :param gtable_output_names: names of gtable-class output fields
    :returns: tuple (input_field, output_field); either may be '' if skipped
    """
    input_field = ''
    output_field = ''

    if required_file_input_names or gtable_input_names:
        print ''
        print fill('Your app template can be initialized to split and process a ' + BOLD() + 'GTable' + ENDC() + ' input.  The following of your input fields are eligible for this template pattern:')
        print '  ' + '\n  '.join(gtable_input_names)
        # Enable tab completion over the eligible field names for this prompt.
        use_completer(Completer(gtable_input_names))
        input_field = prompt_for_var('Input field to process (press ENTER to skip)', '', choices=required_file_input_names + gtable_input_names)
        use_completer()

    # Only offer a parallel-build output if an input field was chosen.
    if input_field != '' and len(gtable_output_names) > 0:
        print ''
        print fill('Your app template can be initialized to build a ' + BOLD() + 'GTable' + ENDC() + ' in parallel for your output.  The following of your output fields are eligible for this template pattern:')
        print '  ' + '\n  '.join(gtable_output_names)
        use_completer(Completer(gtable_output_names))
        output_field = prompt_for_var('Output gtable to build in parallel (press ENTER to skip)', '', choices=gtable_output_names)
    return input_field, output_field
Exemplo n.º 33
0
def wait_for_depends_on(depends_on, all_job_outputs):
    # Wait for depends_on and any data objects in the input to close
    """Block until each dependency (job or data object) has finished/closed.

    NOTE: Python 2 code (print statements).

    :param depends_on: list of IDs: local jobs ("localjob-..."), platform
        jobs ("job-..."), or data object IDs
    :param all_job_outputs: mapping of finished local job IDs to their outputs
    :raises Exception: if any dependency cannot be waited on
    """
    if len(depends_on) > 0:
        print fill('Processing dependsOn and any DNAnexus links to closing objects in the input')
        for an_id in depends_on:
            try:
                print '  Waiting for ' + an_id + '...'
                if an_id.startswith('localjob'):
                    # Local jobs cannot be polled; they must already have finished.
                    if all_job_outputs.get(an_id) is None:
                        raise Exception('Job ' + an_id + ' could not be found in local finished jobs')
                elif an_id.startswith('job'):
                    dxjob = dxpy.DXJob(an_id)
                    dxjob.wait_on_done()
                else:
                    # Anything else is treated as a data object; wait for it to close.
                    handler = dxpy.get_handler(an_id)
                    desc = handler.describe()
                    handler._wait_on_close()
            except BaseException as e:
                # Wrap any failure with the offending ID for context.
                raise Exception('Could not wait for ' + an_id + ': ' + str(e))
Exemplo n.º 34
0
def get_strings(app_json, file_input_names, file_array_input_names, file_output_names, dummy_output_hash):
    """Generate bash snippet strings for a new app(let)'s shell code template.

    :param app_json: dxapp.json contents (reads "inputSpec" and "outputSpec")
    :param file_input_names: names of (non-array) file-class inputs
    :param file_array_input_names: names of array:file inputs
    :param file_output_names: names of file-class outputs
    :param dummy_output_hash: unused here; presumably kept for interface
        parity with other language generators — TODO confirm
    :returns: tuple ('', init_inputs_str, dl_files_str, ul_files_str, outputs_str)
    """
    init_inputs_str = ''
    dl_files_str = ''
    ul_files_str = ''
    outputs_str = ''

    # Echo each input's value (array inputs expand with [@]).
    if 'inputSpec' in app_json and len(app_json['inputSpec']) > 0:
        init_inputs_str = "\n" + "\n".join(["    echo \"Value of {name}: '${var}'\"".format(name=input_param['name'], var=(("{" + input_param['name'] + "[@]}") if input_param['class'].startswith('array:') else input_param['name'])) for input_param in app_json['inputSpec']]) + "\n"

    # "dx download" stanzas for single files and file arrays.
    if len(file_input_names) > 0 or len(file_array_input_names) > 0:
        dl_files_str = "\n" + fill('''The following line(s) use the dx command-line tool to download
your file inputs to the local file system using variable names for the filenames.
To recover the original filenames, you can use the output of "dx describe "$variable" --name".''',
                                    initial_indent='    # ', subsequent_indent='    # ', width=80) + '\n\n'
        if len(file_input_names) > 0:
            dl_files_str += "\n".join(['    dx download "$' + name + '" -o ' + name for name in file_input_names]) + '\n'
        if len(file_array_input_names) > 0:
            dl_files_str += "\n".join(['    for i in ${!' + name + '[@]}\n    do\n        dx download "${' + name + '[$i]}" -o ' + name + '-$i\n    done' for name in file_array_input_names]) + '\n'

    # "dx upload" stanzas for each file output.
    if len(file_output_names) > 0:
        ul_files_str = "\n" if init_inputs_str != '' else ""
        ul_files_str += fill('''The following line(s) use the dx command-line tool to upload your file outputs after you have created them on the local file system.  It assumes that you have used the output field name for the filename for each output, but you can change that behavior to suit your needs.  Run "dx upload -h" to see more options to set metadata.''',
                             initial_indent='    # ', subsequent_indent='    # ', width=80) + '\n\n'
        ul_files_str += "\n".join(['    {name}=$(dx upload {name} --brief)'.format(name=name) for name in file_output_names]) + "\n"

    # "dx-jobutil-add-output" stanzas (or guidance when no output spec exists).
    if 'outputSpec' in app_json and len(app_json['outputSpec']) > 0:
        outputs_str = "\n" + fill('''The following line(s) use the utility dx-jobutil-add-output to format
and add output variables to your job's output as appropriate for the output class.  Run
\"dx-jobutil-add-output -h\" for more information on what it does.''',
                           initial_indent='    # ', subsequent_indent='    # ', width=80) + '\n\n'
        outputs_str += "\n".join(["    dx-jobutil-add-output " + output_param['name'] + ' "$' + output_param['name'] + '" --class=' + output_param['class'] for output_param in app_json['outputSpec']])
    elif 'outputSpec' not in app_json:
        outputs_str = "\n" + fill('''No output spec is specified, but
if you would like to add output fields, you can add invocations of the
dx-jobutil-add-output utility to format and add values to the
job_output.json file.  For example, "dx-jobutil-add-output keyname 32"
will add an output field called "keyname" with value equal to the
number 32.  Run \"dx-jobutil-add-output -h\" for more details on what
it does.''',
                                  initial_indent='    # ', subsequent_indent='    # ', width=80)

    return '', init_inputs_str, dl_files_str, ul_files_str, outputs_str
Exemplo n.º 35
0
def remove_membership(args):
    """Remove a user from an org, optionally asking for interactive confirmation."""
    # Throws ResourceNotFound if the specified user is not currently a
    # member of the org.
    dxpy.api.org_get_member_access(args.org_id,
                                   {"user": "******" + args.username})

    confirmed = not args.confirm
    if not confirmed:
        # Request interactive confirmation.
        print(fill("WARNING: About to remove user-{u} from {o}; project permissions will{rpp} be removed and app permissions will{rap} be removed".format(
            u=args.username, o=args.org_id,
            rpp="" if args.revoke_project_permissions else " not",
            rap="" if args.revoke_app_permissions else " not")))
        confirmed = prompt_for_yn("Please confirm")

    if not confirmed:
        print(fill("Aborting removal of user-{u} from {o}".format(
            u=args.username, o=args.org_id)))
        return

    result = dxpy.api.org_remove_member(args.org_id,
                                        _get_org_remove_member_args(args))
    if args.brief:
        print(result["id"])
        return

    print(fill("Removed user-{u} from {o}".format(u=args.username,
                                                  o=args.org_id)))
    print(fill("Removed user-{u} from the following projects:".format(
        u=args.username)))
    project_ids = list(result["projects"].keys())
    if project_ids:
        for project_id in project_ids:
            print("\t{p}".format(p=project_id))
    else:
        print("\tNone")
    print(fill("Removed user-{u} from the following apps:".format(
        u=args.username)))
    app_ids = list(result["apps"].keys())
    if app_ids:
        for app_id in app_ids:
            print("\t{a}".format(a=app_id))
    else:
        print("\tNone")
Exemplo n.º 36
0
def update_membership(args):
    """Set a member's access level in an org and report the result."""
    # ResourceNotFound is raised if the user is not currently a member
    # of the org.
    dxpy.api.org_get_member_access(args.org_id,
                                   {"user": "******" + args.username})
    set_access_input = _get_org_set_member_access_args(args)
    result = dxpy.api.org_set_member_access(args.org_id, set_access_input)
    print(result["id"] if args.brief else
          fill("Updated membership of user-{u} in {o}".format(
              u=args.username, o=args.org_id)))
Exemplo n.º 37
0
def remove_membership(args):
    """Remove an org member (by username or user ID), with optional confirmation."""
    user_id = get_user_id(args.username_or_user_id)

    # An empty "results" list (IndexError on [0]) means not a member.
    try:
        dxpy.api.org_find_members(args.org_id, {"id": [user_id]})["results"][0]
    except IndexError:
        raise DXCLIError("Cannot remove a user who is not a member of the org")

    confirmed = not args.confirm
    if not confirmed:
        # Request interactive confirmation.
        print(fill("WARNING: About to remove {u} from {o}; project permissions will{rpp} be removed and app permissions will{rap} be removed".format(
            u=user_id, o=args.org_id,
            rpp="" if args.revoke_project_permissions else " not",
            rap="" if args.revoke_app_permissions else " not")))
        confirmed = prompt_for_yn("Please confirm")

    if not confirmed:
        print(fill("Aborting removal of {u} from {o}".format(u=user_id, o=args.org_id)))
        return

    result = dxpy.api.org_remove_member(args.org_id,
                                        _get_org_remove_member_args(args))
    if args.brief:
        print(result["id"])
        return

    print(fill("Removed {u} from {o}".format(u=user_id, o=args.org_id)))
    print(fill("Removed {u} from the following projects:".format(u=user_id)))
    removed_projects = list(result["projects"].keys())
    if removed_projects:
        for project_id in removed_projects:
            print("\t{p}".format(p=project_id))
    else:
        print("\tNone")
    print(fill("Removed {u} from the following apps:".format(u=user_id)))
    removed_apps = list(result["apps"].keys())
    if removed_apps:
        for app_id in removed_apps:
            print("\t{a}".format(a=app_id))
    else:
        print("\tNone")
Exemplo n.º 38
0
def get_io_field(io_hash, defaults=None, delim='=', highlight_fields=None):
    """Format an input/output hash (plus any defaults) for display.

    :param io_hash: mapping of field name to value, or None
    :param defaults: mapping of field name to default value (shown in brackets)
    :param delim: separator shown between each key and value
    :param highlight_fields: field names whose values are colored yellow
    :returns: formatted string, or '-' when there is nothing to show
    """
    # Fixed: avoid mutable default arguments (shared across calls).
    if defaults is None:
        defaults = {}
    if highlight_fields is None:
        highlight_fields = []
    if io_hash is None:
        return '-'
    if len(io_hash) == 0 and len(defaults) == 0:
        return '-'
    def highlight_value(key, value):
        if key in highlight_fields:
            return YELLOW() + value + ENDC()
        else:
            return value
    if get_delimiter() is not None:
        return ('\n' + get_delimiter()).join([(key + delim + highlight_value(key, io_val_to_str(value))) for key, value in io_hash.items()] +
                                             [('[' + key + delim + io_val_to_str(value) + ']') for key, value in defaults.items()])
    else:
        # The trailing [16:] strips the initial_indent from the first entry so
        # the string starts flush with the caller's label column.
        return ('\n').join([fill(key + ' ' + delim + ' ' + highlight_value(key, io_val_to_str(value)),
                                 initial_indent=' '*16,
                                 subsequent_indent=' '*17,
                                 break_long_words=False) for key, value in io_hash.items()] +
                           [fill('[' + key + ' ' + delim + ' ' + io_val_to_str(value) + ']',
                                 initial_indent=' '*16,
                                 subsequent_indent=' '*17,
                                 break_long_words=False) for key, value in defaults.items()])[16:]
Exemplo n.º 39
0
def wait_for_depends_on(depends_on, all_job_outputs):
    # Wait for depends_on and any data objects in the input to close
    """Block until each dependency (job or data object) has finished/closed.

    NOTE: Python 2 code (print statements).

    :param depends_on: list of IDs: local jobs ("localjob-..."), platform
        jobs ("job-..."), or data object IDs
    :param all_job_outputs: mapping of finished local job IDs to their outputs
    :raises Exception: if any dependency cannot be waited on
    """
    if len(depends_on) > 0:
        print fill(
            'Processing dependsOn and any DNAnexus links to closing objects in the input'
        )
        for an_id in depends_on:
            try:
                print '  Waiting for ' + an_id + '...'
                if an_id.startswith('localjob'):
                    # Local jobs cannot be polled; they must already have finished.
                    if all_job_outputs.get(an_id) is None:
                        raise Exception(
                            'Job ' + an_id +
                            ' could not be found in local finished jobs')
                elif an_id.startswith('job'):
                    dxjob = dxpy.DXJob(an_id)
                    dxjob.wait_on_done()
                else:
                    # Anything else is treated as a data object; wait for it to close.
                    handler = dxpy.get_handler(an_id)
                    desc = handler.describe()
                    handler._wait_on_close()
            except BaseException as e:
                # Wrap any failure with the offending ID for context.
                raise Exception('Could not wait for ' + an_id + ': ' + str(e))
Exemplo n.º 40
0
def job_output_to_str(job_output,
                      prefix='\n',
                      title="Output: ",
                      title_len=None):
    """Render a job's output hash as a wrapped, title-aligned string.

    Returns "<prefix><title>-" when the job has no outputs.
    """
    if not job_output:
        return prefix + title + "-"
    title_len = len(title) if title_len is None else title_len
    joiner = prefix + ' ' * title_len
    formatted = (fill(name + ' = ' + io_val_to_str(val),
                      subsequent_indent=' ' * 9,
                      break_long_words=False)
                 for name, val in job_output.items())
    return prefix + title + joiner.join(formatted)
Exemplo n.º 41
0
def add_membership(args):
    """Invite a user to an org; fails if they are already a member.

    :raises DXCLIError: if the user already belongs to the org
    """
    try:
        dxpy.api.org_get_member_access(args.org_id, {"user": "******" + args.username})
    except Exception:
        # Fixed: narrowed bare "except:" (which also swallowed
        # KeyboardInterrupt/SystemExit).  A failed lookup means the user is
        # not yet a member, so the invite may proceed.
        pass
    else:
        raise DXCLIError("Cannot add a user who is already a member of the org")

    dxpy.api.org_invite(args.org_id, get_org_invite_args(args))

    if args.brief:
        print("org-" + args.org_id)
    else:
        print(fill("Invited user-{u} to {o}".format(u=args.username, o=args.org_id)))
Exemplo n.º 42
0
def add_membership(args):
    """Invite a user to an org; fails if they are already a member.

    :raises DXCLIError: if the user already belongs to the org
    """
    try:
        dxpy.api.org_get_member_access(args.org_id,
                                       {"user": "******" + args.username})
    except Exception:
        # Fixed: narrowed bare "except:" (which also swallowed
        # KeyboardInterrupt/SystemExit).  A failed lookup means the user is
        # not yet a member, so the invite may proceed.
        pass
    else:
        raise DXCLIError("Cannot add a user who is already a member of the org")

    dxpy.api.org_invite(args.org_id, get_org_invite_args(args))

    if args.brief:
        print("org-" + args.org_id)
    else:
        print(fill("Invited user-{u} to {o}".format(u=args.username,
                                                    o=args.org_id)))
Exemplo n.º 43
0
def stage_to_job_refs(x, launched_jobs):
    ''' Used by run() to parse stage inputs bound to other stages when executing a workflow '''
    # Recursively replaces {"connectedTo": {"stage": ..., "output": ...}}
    # references with {"job": ..., "field": ...} job-based references,
    # mutating x in place and returning it.
    # NOTE: Python 2 code (dict.iteritems; collections.Mapping was removed
    # from the collections top level in Python 3.10).
    if isinstance(x, collections.Mapping):
        if "connectedTo" in x:
            # Only resolvable if the referenced stage has already launched.
            if x['connectedTo']['stage'] in launched_jobs and launched_jobs[x['connectedTo']['stage']] is not None:
                return {'job': launched_jobs[x['connectedTo']['stage']].get_id(), "field": x['connectedTo']['output']}
            else:
                # TODO: Make this better
                sys.stderr.write(fill("Error: An input is connected to a stage that has not yet been launched.  You will need to reorder the stages before they can be run.") + "\n")
                exit(1)
        for key, value in x.iteritems():
            x[key] = stage_to_job_refs(value, launched_jobs)
    elif isinstance(x, list):
        # Rewrite each list element in place.
        for i in range(len(x)):
            x[i] = stage_to_job_refs(x[i], launched_jobs)
    return x
Exemplo n.º 44
0
def add_membership(args):
    """Invite a user (by username or user ID) to an org.

    :raises DXCLIError: if the user already belongs to the org
    """
    user_id = get_user_id(args.username_or_user_id)

    try:
        dxpy.api.org_find_members(args.org_id, {"id": [user_id]})["results"][0]
    except Exception:
        # Fixed: narrowed bare "except:" (which also swallowed
        # KeyboardInterrupt/SystemExit).  IndexError here means the user is
        # not yet a member, so the invite may proceed.
        pass
    else:
        raise DXCLIError("Cannot add a user who is already a member of the org. To update an existing member's permissions, use 'dx update member'")

    dxpy.api.org_invite(args.org_id, get_org_invite_args(user_id, args))

    if args.brief:
        print("org-" + args.org_id)
    else:
        print(fill("Invited {u} to {o}".format(u=user_id, o=args.org_id)))
Exemplo n.º 45
0
def add_membership(args):
    """Invite a user (by username or user ID) to an org.

    :raises DXCLIError: if the user already belongs to the org
    """
    user_id = get_user_id(args.username_or_user_id)

    try:
        dxpy.api.org_find_members(args.org_id, {"id": [user_id]})["results"][0]
    except Exception:
        # Fixed: narrowed bare "except:" (which also swallowed
        # KeyboardInterrupt/SystemExit).  IndexError here means the user is
        # not yet a member, so the invite may proceed.
        pass
    else:
        raise DXCLIError("Cannot add a user who is already a member of the org. To update an existing member's permissions, use 'dx update member'")

    dxpy.api.org_invite(args.org_id, get_org_invite_args(user_id, args))

    if args.brief:
        print("org-" + args.org_id)
    else:
        print(fill("Invited {u} to {o}".format(u=user_id, o=args.org_id)))
Exemplo n.º 46
0
def remove_membership(args):
    """Remove a user from an org and summarize the removed permissions."""
    # Throws ResourceNotFound if the specified user is not currently a
    # member of the org.
    dxpy.api.org_get_member_access(args.org_id, {"user": "******" + args.username})

    result = dxpy.api.org_remove_member(args.org_id, _get_org_remove_member_args(args))
    if args.brief:
        print(result["id"])
        return
    summary = "Removed user-{u} from {o}. user-{u} has been removed from the following projects {p}. user-{u} has been removed from the following apps {a}.".format(
        u=args.username, o=args.org_id, p=result["projects"].keys(), a=result["apps"].keys()
    )
    print(fill(summary))
Exemplo n.º 47
0
def update_membership(args):
    """Change an existing org member's access level and report the result."""
    user_id = get_user_id(args.username_or_user_id)

    # Indexing [0] into an empty "results" list means the user is not a member.
    found = dxpy.api.org_find_members(args.org_id, {"id": [user_id]})["results"]
    try:
        current_level = found[0]["level"]
    except IndexError:
        raise DXCLIError("Cannot update a user who is not a member of the org")

    set_access_input = _get_org_set_member_access_args(args, current_level)
    result = dxpy.api.org_set_member_access(args.org_id, set_access_input)

    if args.brief:
        print(result["id"])
    else:
        print(fill("Updated membership of {u} in {o}".format(u=user_id, o=args.org_id)))
Exemplo n.º 48
0
def update_membership(args):
    """Update the access level of an existing org member."""
    user_id = get_user_id(args.username_or_user_id)

    # [0] raises IndexError when "results" is empty, i.e. the user is
    # not currently a member of the org.
    matches = dxpy.api.org_find_members(args.org_id, {"id": [user_id]})["results"]
    try:
        member_access = matches[0]
    except IndexError:
        raise DXCLIError("Cannot update a user who is not a member of the org")

    result = dxpy.api.org_set_member_access(
        args.org_id,
        _get_org_set_member_access_args(args, member_access["level"]))

    if not args.brief:
        print(fill("Updated membership of {u} in {o}".format(u=user_id, o=args.org_id)))
    else:
        print(result["id"])
Exemplo n.º 49
0
def get_find_jobs_string(jobdesc,
                         has_children,
                         single_result=False,
                         show_outputs=True):
    '''
    Format one job from "dx find jobs" for terminal display.

    :param jobdesc: hash of job describe output
    :param has_children: whether the job has subjobs to be printed
    :param single_result: whether the job is displayed as a single result or as part of a job tree
    :param show_outputs: whether to append the job's outputs (or failure info)
    :returns: formatted string (may include ANSI colors and tree-drawing chars)
    '''
    is_origin_job = jobdesc['parentJob'] is None or single_result
    # Origin jobs get a "* " bullet unless output is delimiter-separated.
    result = ("* " if is_origin_job and get_delimiter() is None else "")
    canonical_job_name = jobdesc['executableName'] + ":" + jobdesc['function']
    job_name = jobdesc.get('name', '<no name>')
    result += BOLD() + BLUE() + job_name + ENDC()
    # Show executable:function only when it differs from the display name.
    if job_name != canonical_job_name and job_name + ":main" != canonical_job_name:
        result += ' (' + canonical_job_name + ')'
    result += DELIMITER(' (') + JOB_STATES(
        jobdesc['state']) + DELIMITER(') ') + jobdesc['id']
    # Continue the tree gutter under origin jobs that have children.
    result += DELIMITER('\n' + (u'│ ' if is_origin_job and has_children else
                                ("  " if is_origin_job else "")))
    result += jobdesc['launchedBy'][5:] + DELIMITER(' ')  # strip "user-" prefix
    result += render_short_timestamp(jobdesc['created'])
    if jobdesc['state'] in [
            'done', 'failed', 'terminated', 'waiting_on_output'
    ]:
        # TODO: Remove this check once all jobs are migrated to have these values
        if 'stoppedRunning' in jobdesc and 'startedRunning' in jobdesc:
            # Timestamps are in milliseconds; convert the delta to seconds.
            result += " (runtime {r})".format(r=str(
                datetime.timedelta(seconds=int(jobdesc['stoppedRunning'] -
                                               jobdesc['startedRunning']) /
                                   1000)))
    elif jobdesc['state'] == 'running':
        result += " (running for {rt})".format(rt=datetime.timedelta(
            seconds=int(time.time() - jobdesc['startedRunning'] / 1000)))

    if show_outputs:
        prefix = DELIMITER('\n' + (u'│ ' if is_origin_job and has_children else
                                   ("  " if is_origin_job else "")))
        if jobdesc.get("output") != None:
            result += job_output_to_str(jobdesc['output'], prefix=prefix)
        elif jobdesc['state'] == 'failed' and 'failureReason' in jobdesc:
            # Failed jobs show the reason (bold) plus a wrapped failure message.
            result += prefix + BOLD() + jobdesc['failureReason'] + ENDC(
            ) + ": " + fill(jobdesc.get('failureMessage', ''),
                            subsequent_indent=prefix.lstrip('\n'))

    return result
Exemplo n.º 50
0
def get_metadata(api_version):
    """Interactively prompt for an app's title, summary, and description.

    NOTE: Python 2 code (print statements).  ``api_version`` is accepted
    but not read in this body — TODO confirm whether callers rely on it.

    :returns: tuple (title, summary, description); each may be ''
    """
    print ''
    print fill('The ' + BOLD() + 'title' + ENDC() + ', if provided, is what is shown as the name of your app on the website.  It can be any valid UTF-8 string.')
    title = prompt_for_var('Title', '')

    print ''
    print fill('The ' + BOLD() + 'summary' + ENDC() + ' of your app is a short phrase or one-line description of what your app does.  It can be any UTF-8 human-readable string.')
    summary = prompt_for_var('Summary', '')

    print ''
    print fill('The ' + BOLD() + 'description' + ENDC() + ' of your app is a longer piece of text describing your app.  It can be any UTF-8 human-readable string, and it will be interpreted using Markdown (see http://daringfireball.net/projects/markdown/syntax/ for more details).')
    description = prompt_for_var('Description', '')

    return title, summary, description
Exemplo n.º 51
0
def get_metadata(api_version):
    """Prompt the user for app metadata fields.

    For each of the three metadata fields, prints a blank line and a wrapped
    description, then reads a value with an empty default.

    :param api_version: accepted for interface compatibility; not used here
    :returns: tuple of (title, summary, description)
    """
    prompts = [
        ('Title', 'The ' + BOLD() + 'title' + ENDC() + ', if provided, is what is shown as the name of your app on the website.  It can be any valid UTF-8 string.'),
        ('Summary', 'The ' + BOLD() + 'summary' + ENDC() + ' of your app is a short phrase or one-line description of what your app does.  It can be any UTF-8 human-readable string.'),
        ('Description', 'The ' + BOLD() + 'description' + ENDC() + ' of your app is a longer piece of text describing your app.  It can be any UTF-8 human-readable string, and it will be interpreted using Markdown (see http://daringfireball.net/projects/markdown/syntax/ for more details).'),
    ]
    answers = []
    for label, blurb in prompts:
        print('')
        print(fill(blurb))
        answers.append(prompt_for_var(label, ''))

    # (title, summary, description)
    return tuple(answers)
Exemplo n.º 52
0
def get_name(default=None):
    """Prompt for a valid app name and prepare a directory of that name.

    Re-prompts until the entered name matches /^[a-zA-Z0-9._-]+$/.  If a
    directory of that name already exists, offers to wipe its contents; if a
    regular file is in the way, asks for another name instead.

    :param default: default value offered at the prompt (may be None)
    :returns: the chosen app name
    """
    print(fill('The ' + BOLD() + 'name' + ENDC() + ' of your app must be unique on the DNAnexus platform.  After creating your app for the first time, you will be able to publish new versions using the same app name.  App names are restricted to alphanumeric characters (a-z, A-Z, 0-9), and the characters ".", "_", and "-".'))
    name_pattern = re.compile('^[a-zA-Z0-9._-]+$')
    while True:
        name = prompt_for_var('App Name', default)
        if name_pattern.match(name) is None:
            print(fill('The name of your app must match /^[a-zA-Z0-9._-]+$/'))
        else:
            if os.path.exists(name):
                if os.path.isdir(name):
                    remove_dir = prompt_for_yn('The directory %s already exists.  Would you like to remove all of its contents and create a new directory in its place?' % name)
                    if remove_dir:
                        shutil.rmtree(name)
                        print(fill('Replacing all contents of directory %s...' % name))
                    else:
                        print('')
                        continue
                else:
                    # Bug fix: the %s placeholder was previously printed
                    # verbatim because the name was never interpolated.
                    print(fill('A file named %s already exists.  Please choose another name or rename your file' % name))
                    continue
            break
    return name
Exemplo n.º 53
0
def get_name(default=None):
    """Prompt for a valid app name and prepare a directory of that name.

    Re-prompts until the entered name matches /^[a-zA-Z0-9._-]+$/.  If a
    directory of that name already exists, offers to wipe its contents; if a
    regular file is in the way, asks for another name instead.

    :param default: default value offered at the prompt (may be None)
    :returns: the chosen app name
    """
    print(fill('The ' + BOLD() + 'name' + ENDC() + ' of your app must be unique on the DNAnexus platform.  After creating your app for the first time, you will be able to publish new versions using the same app name.  App names are restricted to alphanumeric characters (a-z, A-Z, 0-9), and the characters ".", "_", and "-".'))
    name_pattern = re.compile('^[a-zA-Z0-9._-]+$')
    while True:
        name = prompt_for_var('App Name', default)
        if name_pattern.match(name) is None:
            print(fill('The name of your app must match /^[a-zA-Z0-9._-]+$/'))
        else:
            if os.path.exists(name):
                if os.path.isdir(name):
                    remove_dir = prompt_for_yn('The directory %s already exists.  Would you like to remove all of its contents and create a new directory in its place?' % name)
                    if remove_dir:
                        shutil.rmtree(name)
                        print(fill('Replacing all contents of directory %s...' % name))
                    else:
                        print('')
                        continue
                else:
                    # Bug fix: the %s placeholder was previously printed
                    # verbatim because the name was never interpolated.
                    print(fill('A file named %s already exists.  Please choose another name or rename your file' % name))
                    continue
            break
    return name
Exemplo n.º 54
0
def format_choices_or_suggestions(header,
                                  items,
                                  obj_class,
                                  initial_indent=' ' * 8,
                                  subsequent_indent=' ' * 10):
    """Render a header plus a list of choice/suggestion values for an input.

    Items of DNAnexus data-object classes are listed one reference per line;
    all other classes are rendered as a single wrapped, comma-separated,
    shell-quoted list.
    """
    # For "array:file" etc., only the element class matters for display.
    if obj_class.startswith('array:'):
        obj_class = obj_class[6:]

    def render_reference(entry):
        if dxpy.is_dxlink(entry):
            # Bare dxlink: "[project:]object-id"
            obj_id, proj_id = dxpy.get_dxlink_ids(entry)
            return (proj_id + ":" if proj_id else '') + obj_id
        if dxpy.is_dxlink(entry.get('value')):
            # {"value": <dxlink>, ...}: append the display name if present
            obj_id, proj_id = dxpy.get_dxlink_ids(entry['value'])
            suffix = ' (%s)' % entry['name'] if entry.get('name') else ''
            return (proj_id + ":" if proj_id else '') + obj_id + suffix
        if entry.get('project') and entry.get('path'):
            # {"project": ..., "path": ...}: a folder of candidate objects
            suffix = ' (%s)' % entry['name'] if entry.get('name') else ''
            return entry['project'] + ':' + entry['path'] + "/" + obj_class + "-*" + suffix
        return str(entry)

    if obj_class in dx_data_classes:
        lines = [initial_indent + header]
        lines.extend(subsequent_indent + render_reference(entry)
                     for entry in items)
        return '\n'.join(lines)

    # TODO: in interactive prompts the quotes here may be a bit
    # misleading. Perhaps it should be a separate mode to print
    # "interactive-ready" suggestions.
    return fill(header + ' ' +
                ', '.join([pipes.quote(unicode(item)) for item in items]),
                initial_indent=initial_indent,
                subsequent_indent=subsequent_indent)
Exemplo n.º 55
0
 def prompt_for_optional_inputs(self):
     """Interactively let the user set values for optional input parameters.

     Repeatedly shows a numbered menu of this executable's optional inputs
     (annotated with the value already chosen, or the spec's default) until
     the user presses ^D or <ENTER>, prompting for a value each time a
     parameter number is selected.  Chosen values are stored in self.inputs.
     """
     while True:
         print '\n' + fill(
             'Select an optional parameter to set by its # (^D or <ENTER> to finish):'
         ) + '\n'
         # Render the menu: one line per optional input, showing the
         # currently-set value (in green) or the spec default, when present.
         for i in range(len(self.optional_inputs)):
             opt_str = ' [' + str(i) + '] ' + \
                 get_optional_input_str(self.input_spec[self.optional_inputs[i]])
             if self.optional_inputs[i] in self.inputs:
                 opt_str += ' [=' + GREEN()
                 opt_str += json.dumps(self.inputs[self.optional_inputs[i]])
                 opt_str += ENDC() + ']'
             elif 'default' in self.input_spec[self.optional_inputs[i]]:
                 opt_str += ' [default=' + json.dumps(self.input_spec[
                     self.optional_inputs[i]]['default']) + ']'
             print opt_str
         print ""
         try:
             # Keep asking until the user enters a valid index (or an empty
             # string, which finishes the whole interaction).
             while True:
                 selected = raw_input('Optional param #: ')
                 if selected == '':
                     return
                 try:
                     opt_num = int(selected)
                     if opt_num < 0 or opt_num >= len(self.optional_inputs):
                         raise ValueError(
                             'Error: Selection is out of range')
                     break
                 except ValueError as details:
                     # Covers both non-integer input and out-of-range indices.
                     print unicode(details)
                     continue
         except EOFError:
             # ^D at the selection prompt finishes the interaction.
             return
         try:
             self.inputs[
                 self.optional_inputs[opt_num]] = self.prompt_for_input(
                     self.optional_inputs[opt_num])
         except:
             # Best-effort: an aborted/failed prompt leaves the value unset
             # and returns to the menu.
             pass
Exemplo n.º 56
0
def download_one_file(project, file_desc, dest_filename, args):
    """Download a single platform file described by file_desc to dest_filename.

    Refuses to clobber an existing local path unless args.overwrite is set,
    and silently skips (with a note on stderr) objects that are not files or
    files that are not yet in the 'closed' state.
    """
    # Refuse to overwrite unless -f/--overwrite was given.
    if not args.overwrite and os.path.exists(dest_filename):
        err_exit(fill('Error: path "' + dest_filename + '" already exists but -f/--overwrite was not set'))

    if file_desc['class'] != 'file':
        print("Skipping non-file data object {name} ({id})".format(**file_desc), file=sys.stderr)
        return

    # Only closed files have finalized contents available for download.
    if file_desc['state'] != 'closed':
        print("Skipping file {name} ({id}) because it is not closed".format(**file_desc), file=sys.stderr)
        return

    # Not all arg namespaces carry this flag; default to no progress bar.
    show_progress = getattr(args, 'show_progress', False)

    try:
        dxpy.download_dxfile(file_desc['id'], dest_filename,
                             show_progress=show_progress, project=project)
    except:
        err_exit()
Exemplo n.º 57
0
def stage_to_job_refs(x, launched_jobs):
    ''' Used by run() to parse stage inputs bound to other stages when executing a workflow '''
    if isinstance(x, collections.Mapping):
        if "connectedTo" in x:
            link = x['connectedTo']
            upstream = launched_jobs.get(link['stage'])
            if upstream is not None:
                # Rewrite the stage binding as a job-based object reference.
                return {
                    'job': upstream.get_id(),
                    "field": link['output']
                }
            # TODO: Make this better
            sys.stderr.write(
                fill(
                    "Error: An input is connected to a stage that has not yet been launched.  You will need to reorder the stages before they can be run."
                ) + "\n")
            exit(1)
        # Recurse into mapping values in place (keys are unchanged).
        for key in x:
            x[key] = stage_to_job_refs(x[key], launched_jobs)
    elif isinstance(x, list):
        # Recurse into list elements in place.
        for i, elem in enumerate(x):
            x[i] = stage_to_job_refs(elem, launched_jobs)
    return x
Exemplo n.º 58
0
def run_one_entry_point(job_id, function, input_hash, run_spec, depends_on, name=None):
    '''
    :param job_id: job ID of the local job to run
    :type job_id: string
    :param function: function to run
    :type function: string
    :param input_hash: input for the job (may include job-based object references)
    :type input_hash: dict
    :param run_spec: run specification from the dxapp.json of the app
    :type run_spec: dict
    :param depends_on: IDs of jobs/data objects that must finish or close
        before this entry point may run (may be None)
    :type depends_on: list or None
    :param name: optional human-readable job name used in log output
    :type name: string or None

    Runs the specified entry point and retrieves the job's output,
    updating job_outputs.json (in $DX_TEST_JOB_HOMEDIRS) appropriately.
    '''
    print('======')

    # Each local job gets its own home directory under $DX_TEST_JOB_HOMEDIRS.
    job_homedir = os.path.join(environ['DX_TEST_JOB_HOMEDIRS'], job_id)

    job_env = environ.copy()
    job_env['HOME'] = os.path.join(environ['DX_TEST_JOB_HOMEDIRS'], job_id)

    # Aggregated outputs of all local jobs run so far; loaded as an
    # OrderedDict to keep serialization deterministic.
    all_job_outputs_path = os.path.join(environ['DX_TEST_JOB_HOMEDIRS'], 'job_outputs.json')

    with open(all_job_outputs_path, 'r') as fd:
        all_job_outputs = json.load(fd, object_pairs_hook=collections.OrderedDict)

    # Display name for log lines: "<name> (<job>:<function>)", or just
    # "<job>:<function>" when no name was supplied.
    if isinstance(name, basestring):
        name += ' (' + job_id + ':' + function + ')'
    else:
        name = job_id + ':' + function
    job_name = BLUE() + BOLD() + name + ENDC()
    print(job_name)

    # Resolve local job-based object references
    try:
        resolve_job_references(input_hash, all_job_outputs)
    except Exception as e:
        exit_with_error(job_name + ' ' + JOB_STATES('failed') + ' when resolving input:\n' + fill(str(e)))

    # Get list of non-closed data objects in the input that appear as
    # DNAnexus links; append to depends_on
    if depends_on is None:
        depends_on = []
    get_implicit_depends_on(input_hash, depends_on)

    try:
        wait_for_depends_on(depends_on, all_job_outputs)
    except Exception as e:
        exit_with_error(job_name + ' ' + JOB_STATES('failed') + ' when processing depends_on:\n' + fill(str(e)))

    # Save job input to job_input.json
    with open(os.path.join(job_homedir, 'job_input.json'), 'wb') as fd:
        json.dump(input_hash, fd, indent=4)
        fd.write(b'\n')

    print(job_output_to_str(input_hash, title=(BOLD() + 'Input: ' + ENDC()),
                            title_len=len("Input: ")).lstrip())

    if run_spec['interpreter'] == 'bash':
        # Save job input to env vars
        env_path = os.path.join(job_homedir, 'environment')
        with open(env_path, 'w') as fd:
            # Following code is what is used to generate env vars on the remote worker
            fd.write("\n".join(["export {k}=( {vlist} )".format(k=k, vlist=" ".join([pipes.quote(vitem if isinstance(vitem, basestring) else json.dumps(vitem)) for vitem in v])) if isinstance(v, list) else "export {k}={v}".format(k=k, v=pipes.quote(v if isinstance(v, basestring) else json.dumps(v))) for k, v in input_hash.items()]))

    print(BOLD() + 'Logs:' + ENDC())
    start_time = datetime.datetime.now()
    if run_spec['interpreter'] == 'bash':
        # NOTE(review): "cd hotexamples_com" below looks like a mangled
        # placeholder (the unused homedir= format argument suggests it was
        # originally "cd {homedir}") -- confirm against upstream source.
        script = '''
          cd hotexamples_com;
          . {env_path};
          . {code_path};
          if [[ $(type -t {function}) == "function" ]];
          then {function};
          else echo "$0: Global scope execution complete. Not invoking entry point function {function} because it was not found" 1>&2;
          fi'''.format(homedir=pipes.quote(job_homedir),
                       env_path=pipes.quote(os.path.join(job_env['HOME'], 'environment')),
                       code_path=pipes.quote(environ['DX_TEST_CODE_PATH']),
                       function=function)
        invocation_args = ['bash', '-c', '-e'] + (['-x'] if environ.get('DX_TEST_X_FLAG') else []) + [script]
    elif run_spec['interpreter'] == 'python2.7':
        # NOTE(review): "os.chdir(hotexamples_com)" below also looks like a
        # mangled "{homedir}" placeholder -- confirm against upstream source.
        script = '''#!/usr/bin/env python
import os
os.chdir(hotexamples_com)

{code}

import dxpy, json
if dxpy.utils.exec_utils.RUN_COUNT == 0:
    dxpy.run()
'''.format(homedir=repr(job_homedir),
           code=run_spec['code'])

        job_env['DX_TEST_FUNCTION'] = function
        invocation_args = ['python', '-c', script]

    # Under Python 2, subprocess arguments and the environment must be byte
    # strings; encode using the terminal's encoding.
    if USING_PYTHON2:
        invocation_args = [arg.encode(sys.stdout.encoding) for arg in invocation_args]
        env = {k: v.encode(sys.stdout.encoding) for k, v in job_env.items()}
    else:
        env = job_env

    fn_process = subprocess.Popen(invocation_args, env=env)

    fn_process.communicate()
    end_time = datetime.datetime.now()

    if fn_process.returncode != 0:
        exit_with_error(job_name + ' ' + JOB_STATES('failed') + ', exited with error code ' + str(fn_process.returncode) + ' after ' + str(end_time - start_time))

    # Now updating job output aggregation file with job's output
    job_output_path = os.path.join(job_env['HOME'], 'job_output.json')
    if os.path.exists(job_output_path):
        try:
            with open(job_output_path, 'r') as fd:
                job_output = json.load(fd, object_pairs_hook=collections.OrderedDict)
        except Exception as e:
            exit_with_error('Error: Could not load output of ' + job_name + ':\n' + fill(str(e.__class__) + ': ' + str(e)))
    else:
        # The entry point produced no job_output.json; treat as empty output.
        job_output = {}

    print(job_name + ' -> ' + GREEN() + 'finished running' + ENDC() + ' after ' + str(end_time - start_time))
    print(job_output_to_str(job_output, title=(BOLD() + "Output: " + ENDC()),
                            title_len=len("Output: ")).lstrip())

    # Re-read the aggregation file and record this job's output under its ID.
    with open(os.path.join(environ['DX_TEST_JOB_HOMEDIRS'], 'job_outputs.json'), 'r') as fd:
        all_job_outputs = json.load(fd, object_pairs_hook=collections.OrderedDict)
    all_job_outputs[job_id] = job_output

    # Before dumping, see if any new jbors should be resolved now
    for other_job_id in all_job_outputs:
        if all_job_outputs[other_job_id] is None:
            # Skip if job is not done yet (true for ancestor jobs)
            continue
        resolve_job_references(all_job_outputs[other_job_id], all_job_outputs, should_resolve=False)

    with open(os.path.join(environ['DX_TEST_JOB_HOMEDIRS'], 'job_outputs.json'), 'wb') as fd:
        json.dump(all_job_outputs, fd, indent=4)
        fd.write(b'\n')
Exemplo n.º 59
0
def update_stage(args):
    """Update properties of one stage of a workflow (dx update stage).

    Depending on the provided args this can replace the stage's executable,
    rename it, change its (relative) output folder, and/or update its bound
    inputs or instance type.  Exits via err_exit/try_call_err_exit on
    failure; prints a notice and returns if no update flags were given.
    """
    # get workflow
    workflow_id, project = get_workflow_id_and_project(args.workflow)
    dxworkflow = dxpy.DXWorkflow(workflow_id, project=project)

    # process instance type
    try_call(process_instance_type_arg, args)

    # Captured before any edits; passed to update_stage below so the API can
    # detect concurrent modifications of the workflow.
    initial_edit_version = dxworkflow.editVersion

    # The stage may be given as a numeric index or as a stage ID/name; only
    # coerce when it parses as an integer.
    try:
        args.stage = int(args.stage)
    except:
        pass

    if not any([
            args.executable, args.name, args.no_name, args.output_folder,
            args.relative_output_folder, args.input, args.input_json,
            args.filename, args.instance_type
    ]):
        print('No updates requested; none made')
        return

    new_exec_handler = None
    if args.executable is not None:
        # get executable
        new_exec_handler = try_call(dxpy.utils.resolver.get_exec_handler,
                                    args.executable, args.alias)
        exec_inputs = dxpy.cli.exec_io.ExecutableInputs(new_exec_handler)
        try_call(exec_inputs.update_from_args, args, require_all_inputs=False)
        stage_input = exec_inputs.inputs
    elif args.input or args.input_json or args.filename:
        # input is updated, so look up the existing one
        existing_exec_handler = dxpy.utils.resolver.get_exec_handler(
            dxworkflow.get_stage(args.stage)['executable'])
        exec_inputs = dxpy.cli.exec_io.ExecutableInputs(existing_exec_handler)
        try_call(exec_inputs.update_from_args, args, require_all_inputs=False)
        stage_input = exec_inputs.inputs
    else:
        stage_input = None

    # get folder path
    folderpath = None
    if args.output_folder is not None:
        try:
            # Resolve to an absolute platform folder path when possible;
            # fall back to the raw string on any resolution failure.
            _ignore, folderpath, _none = resolve_path(args.output_folder,
                                                      expected='folder')
        except:
            folderpath = args.output_folder
    elif args.relative_output_folder is not None:
        folderpath = args.relative_output_folder

    try:
        dxworkflow.update_stage(args.stage,
                                executable=new_exec_handler,
                                force=args.force,
                                name=args.name,
                                unset_name=args.no_name,
                                folder=folderpath,
                                stage_input=stage_input,
                                instance_type=args.instance_type,
                                edit_version=initial_edit_version)
    except InvalidState as e:
        # An InvalidState mentioning "compatible" means the new executable
        # failed the compatibility check; surface the reasons and suggest
        # --force.  Any other InvalidState is reported generically.
        if "compatible" in str(e):
            err_msg = 'The requested executable could not be verified as a compatible replacement'
            if 'incompatibilities' in e.details and e.details[
                    'incompatibilities']:
                err_msg += ' for the following reasons:\n'
                err_msg += '\n'.join([
                    printing.fill(incompat,
                                  initial_indent='- ',
                                  subsequent_indent='  ')
                    for incompat in e.details['incompatibilities']
                ])
            else:
                err_msg += '.'
            err_msg += '\nRerun with --force to replace the executable anyway'
            err_exit(expected_exceptions=DXCLIError,
                     exception=DXCLIError(err_msg))
        else:
            try_call_err_exit()
    except:
        try_call_err_exit()
Exemplo n.º 60
0
def download(args):
    """Resolve each requested path to files and/or folders and download them.

    Each entry in args.paths may name files, folders, or a glob matching
    either; matches are grouped per project and handed to _download_folders
    and _download_files.  Exits with an error for unresolvable paths, for
    file/folder name collisions at the destination, and for invalid --output
    values.
    """
    # Per-project accumulators for folder and file matches, plus a total
    # match count used to validate --output.
    folders_to_get, files_to_get, count = collections.defaultdict(
        list), collections.defaultdict(list), 0
    foldernames, filenames = [], []
    for path in args.paths:
        # Attempt to resolve name. If --all is given or the path looks like a glob, download all matches.
        # Otherwise, the resolver will display a picker (or error out if there is no tty to display to).
        resolver_kwargs = {'allow_empty_string': False}
        if args.all or _is_glob(path):
            resolver_kwargs.update({'allow_mult': True, 'all_mult': True})

        project, folderpath, matching_files = try_call(resolve_existing_path,
                                                       path, **resolver_kwargs)
        # Normalize matching_files to a (possibly empty) list.
        if matching_files is None:
            matching_files = []
        elif not isinstance(matching_files, list):
            matching_files = [matching_files]

        # TODO: this could also be returned as metadata by resolve_path since
        # resolve_path knows these things in some circumstances
        path_has_explicit_proj = is_project_explicit(path) or is_jbor_str(path)

        if is_jbor_str(path):
            # A job-based object reference resolves to exactly one file; use
            # the project recorded in its describe output.
            assert len(matching_files) == 1
            project = matching_files[0]["describe"]["project"]

        matching_folders = []
        # project may be none if path is an ID and there is no project context
        if project is not None:
            # Strip any "project:" prefix, then glob-match the path against
            # the subfolders of its parent folder.
            colon_pos = get_first_pos_of_char(":", path)
            if colon_pos >= 0:
                path = path[colon_pos + 1:]
            abs_path, strip_prefix = _rel2abs(path, project)
            parent_folder = os.path.dirname(abs_path)
            folder_listing = dxpy.list_subfolders(project,
                                                  parent_folder,
                                                  recurse=False)
            matching_folders = pathmatch.filter(folder_listing, abs_path)
            if '/' in matching_folders and len(matching_folders) > 1:
                # The list of subfolders is {'/', '/A', '/B'}.
                # Remove '/', otherwise we will download everything twice.
                matching_folders.remove('/')

        if len(matching_files) == 0 and len(matching_folders) == 0:
            err_exit(
                fill(
                    'Error: {path} is neither a file nor a folder name'.format(
                        path=path)))

        # If the user did not explicitly provide the project, don't pass any
        # project parameter to the API call but continue with the download.
        if not path_has_explicit_proj:
            project = dxpy.DXFile.NO_PROJECT_HINT

        # If the user explicitly provided the project and it doesn't contain
        # the files, don't allow the download.
        #
        # If length of matching_files is 0 then we're only downloading folders
        # so skip this logic since the files will be verified in the API call.
        if len(matching_files) > 0 and path_has_explicit_proj and not \
                any(object_exists_in_project(f['describe']['id'], project) for f in matching_files):
            err_exit(
                fill(
                    'Error: specified project does not contain specified file object'
                ))

        files_to_get[project].extend(matching_files)
        # strip_prefix is only assigned when project is not None, but
        # matching_folders is empty in the other case, so it is never read
        # unbound here.
        folders_to_get[project].extend(
            ((f, strip_prefix) for f in matching_folders))
        count += len(matching_files) + len(matching_folders)

        filenames.extend(f["describe"]["name"] for f in matching_files)
        foldernames.extend(f[len(strip_prefix):].lstrip('/')
                           for f in matching_folders)

    # A file and a folder with the same local name cannot both be written to
    # the same destination directory.
    if len(filenames) > 0 and len(foldernames) > 0:
        name_conflicts = set(filenames) & set(foldernames)
        if len(name_conflicts) > 0:
            msg = "Error: The following paths are both file and folder names, and " \
                  "cannot be downloaded to the same destination: "
            msg += ", ".join(sorted(name_conflicts))
            err_exit(fill(msg))

    # Work out the destination directory and (for a single file) filename
    # from --output.
    if args.output is None:
        destdir, dest_filename = os.getcwd(), None
    elif count > 1:
        if not os.path.exists(args.output):
            err_exit(
                fill(
                    "Error: When downloading multiple objects, --output must be an existing directory"
                ))
        destdir, dest_filename = args.output, None
    elif os.path.isdir(args.output):
        destdir, dest_filename = args.output, None
    elif args.output.endswith('/'):
        err_exit(
            fill("Error: {path} could not be found".format(path=args.output)))
    else:
        destdir, dest_filename = os.getcwd(), args.output

    _download_folders(folders_to_get, destdir, args)
    _download_files(files_to_get, destdir, args, dest_filename=dest_filename)