async def main():
    args = vars(parser.parse_args())
    db = DataStore(args['dbDir'], args['debug'])
    await db.load()

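    # inputs will map each dataset label to a dict of { argument name: file path }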
    inputs = {}
    r = re.compile(args['label'])
    if r.groups == 0:
        # We're in normal mode; one path per argument
        inputs[args['label']] = {}
        for arg in ['input', 'tree', 'performance', 'graph', 'otf2', 'physl', 'python', 'cpp']:
            if len(args[arg]) == 1:
                inputs[args['label']][arg] = args[arg][0]
            elif len(args[arg]) > 1:
                raise Exception('To use glob patterns, please provide a regular expression with one capture group as a --label argument')
        if not inputs[args['label']]:
            raise Exception('At least one of: --input, --tree, --performance, --graph, --otf2, --physl, --python, and/or --cpp is required')
    elif r.groups == 1:
        # We're in globbing mode; we can expect many files per argument, and
        # --label should be a regular expression that matches input files to
        # their labels. The only (possible) exceptions are code files: if only
        # one is provided, it is used for all labels (otherwise, each is
        # expected to match the regular expression as well)
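        # For example (hypothetical pattern and paths): --label 'runs/(.*)/.*'
        # would assign the file runs/run1/output.csv to the label "run1"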
        singlePhysl = args['physl'][0] if len(args['physl']) == 1 else None
        singlePython = args['python'][0] if len(args['python']) == 1 else None
        singleCpp = args['cpp'][0] if len(args['cpp']) == 1 else None
        for arg in ['input', 'tree', 'performance', 'graph', 'otf2', 'physl', 'python', 'cpp']:
            if arg == 'physl' and singlePhysl is not None:
                continue
            if arg == 'python' and singlePython is not None:
                continue
            if arg == 'cpp' and singleCpp is not None:
                continue
            for path in args[arg]:
                m = r.match(path)
                if m is None:
                    raise Exception('--label pattern could not identify a label for file: %s' % path)
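                # Strip any path separators from the captured group so the
                # label is a plain name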
                label = m[1].replace('/', '')
                inputs.setdefault(label, {})
                if arg in inputs[label]:
                    raise Exception('--label pattern found duplicate matches for --%s:\n%s\n%s' % (arg, inputs[label][arg], path))
                inputs[label][arg] = path
        for label in inputs:
            if singlePhysl is not None:
                inputs[label]['physl'] = singlePhysl
            if singlePython is not None:
                inputs[label]['python'] = singlePython
            if singleCpp is not None:
                inputs[label]['cpp'] = singleCpp
    else:
        raise Exception('Too many capturing groups in the --label argument')

    for label, paths in inputs.items():
        if 'input' in paths and ('tree' in paths or 'performance' in paths or 'graph' in paths):
            raise Exception("Don't use --input with --tree, --performance, or --graph for the same --label: %s" % label)
        try:
            await logToConsole('#################' + '#' * len(label))
            await logToConsole('Adding data for: %s' % label)

            # Initialize the dataset
            db.createDataset(label)

            # Handle performance files
            if 'performance' in paths:
                with open(paths['performance'], 'r') as file:
                    await db.processCsvFile(label, file)

            # Handle tree files:
            if 'tree' in paths:
                with open(paths['tree'], 'r') as file:
                    await db.processNewickFile(label, file)

            # Handle graph files:
            if 'graph' in paths:
                with open(paths['graph'], 'r') as file:
                    await db.processDotFile(label, file)

            # Handle stdout from phylanx
            if 'input' in paths:
                with open(paths['input'], 'r') as file:
                    await db.processPhylanxLogFile(label, file)

            # Handle code files
            for codeType in ['physl', 'python', 'cpp']:
                if codeType in paths:
                    with open(paths[codeType], 'r') as file:
                        await db.processCodeFile(label, file, codeType)

            # Handle otf2
            if 'otf2' in paths:
                await db.processOtf2(label, FakeFile(paths['otf2']), args['events'])

            # Save all the data
            await db.save(label)
        except: #pylint: disable=W0702
            await logToConsole('Error encountered; purging corrupted data for: %s' % label)
            db.purgeDataset(label)
            raise
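
# A minimal entry-point sketch (an assumption; adjust or drop it if the real
# script already defines one elsewhere). It simply runs main() on the asyncio
# event loop.
if __name__ == '__main__':
    import asyncio
    asyncio.run(main())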