Example #1
0
def main(argv, cfg):
	"""List jobs in the given workdirs, or list the workdir names.

	With no workdirs and no -a, just prints the configured workdir names.
	With -a, every configured workdir is appended to the list to show.
	Unknown workdir names are reported on stderr and skipped.
	"""
	# BUGFIX: the usage string had unbalanced brackets.
	usage = "%(prog)s [-a | [workdir [workdir [...]]]]"
	parser = ArgumentParser(usage=usage, prog=argv.pop(0))
	parser.add_argument('-a', '--all', action='store_true', help="list all workdirs")
	parser.add_argument('workdirs', nargs='*', default=[])
	args = parser.parse_args(argv)

	if args.all:
		args.workdirs.extend(sorted(cfg.workdirs))

	if not args.workdirs:
		# Nothing requested: just list what exists.
		for wd in sorted(cfg.workdirs):
			print(wd)
		return

	for name in args.workdirs:
		if name not in cfg.workdirs:
			print("No such workdir:", name, file=sys.stderr)
			continue
		known = call(cfg.url + '/workdir/' + url_quote(name))
		for jid in workdir_jids(cfg, name):
			show_job(known, jid)

		try:
			latest = os.readlink(os.path.join(cfg.workdirs[name], name + '-LATEST'))
		except OSError:
			latest = None
		if latest:
			# BUGFIX: previously passed the loop variable jid (the last job
			# shown, or unbound if the workdir had no jobs) instead of the
			# job the -LATEST symlink actually points to.
			show_job(known, latest, name + '-LATEST')
Example #2
0
def cmd_abort(argv):
	"""Ask the server to abort all running jobs, and report the kill count."""
	parser = ArgumentParser(prog=argv.pop(0))
	parser.add_argument('-q', '--quiet', action='store_true', help="no output")
	args = parser.parse_args(argv)
	from accelerator.build import Automata
	automata = Automata(cfg.url)
	res = automata.abort()
	if args.quiet:
		return
	plural = '' if res.killed == 1 else 's'
	print("Killed %d running job%s." % (res.killed, plural))
Example #3
0
def main(argv, cfg):
	"""Run a build script (the "run" command).

	Parses options, resolves the script/package name, builds the per-method
	concurrency map and hands off to run_automata.
	Returns 0 on success, 1 on any failure (after printing a reduced traceback).
	"""
	parser = ArgumentParser(
		prog=argv.pop(0),
		usage="%(prog)s [options] [script]",
		formatter_class=RawTextHelpFormatter,
	)
	parser.add_argument('-f', '--flags',    default='',          help="comma separated list of flags", )
	parser.add_argument('-q', '--quick',    action='store_true', help="skip method updates and checking workdirs for new jobs", )
	parser.add_argument('-c', '--concurrency', action='append',  metavar='SPEC', help="set max concurrency for methods, either method=N\nor just N to set for all other methods", )
	parser.add_argument('-w', '--workdir',  default=None,        help="build in this workdir\nset_workdir() and workdir= override this.", )
	parser.add_argument('-W', '--just_wait',action='store_true', help="just wait for running job, don't run any build script", )
	parser.add_argument('-F', '--fullpath', action='store_true', help="print full path to jobdirs")
	parser.add_argument('--verbose',        default='status',    help="verbosity style {no, status, dots, log}")
	parser.add_argument('--quiet',          action='store_true', help="same as --verbose=no")
	parser.add_argument('--horizon',        default=None,        help="time horizon - dates after this are not visible in\nurd.latest")
	parser.add_argument('script',           default='build'   ,  help="build script to run. default \"build\".\nsearches under all method directories in alphabetical\norder if it does not contain a dot.\nprefixes build_ to last element unless specified.\npackage name suffixes are ok.\nso for example \"test_methods.tests\" expands to\n\"accelerator.test_methods.build_tests\".", nargs='?')

	options = parser.parse_args(argv)

	# "package.script" selects the package explicitly; otherwise all method
	# directories are searched for the script.
	if '.' in options.script:
		options.package, options.script = options.script.rsplit('.', 1)
	else:
		options.package = None

	options.verbose = {'no': False, 'status': True, 'dots': 'dots', 'log': 'log'}[options.verbose]
	if options.quiet: options.verbose = False

	concurrency_map = {}
	for spec in options.concurrency or ():
		# isdecimal (not isnumeric) so we only accept strings int() accepts;
		# e.g. '²' is isnumeric but int('²') raises.
		if spec.isdecimal():
			# A bare number sets the default for all methods.
			concurrency_map['-default-'] = int(spec)
		else:
			try:
				method, n = spec.split('=', 1)
				concurrency_map[method] = int(n)
			except ValueError:
				# BUGFIX: report the full original spec, not the rebound
				# right-hand half that failed int().
				raise Exception('Bad concurrency spec %r' % (spec,))
	options.concurrency_map = concurrency_map

	try:
		run_automata(options, cfg)
		return 0
	except (JobError, ServerError):
		# If it's a JobError we don't care about the local traceback,
		# we want to see the job traceback, and maybe know what line
		# we built the job on.
		# If it's a ServerError we just want the line and message.
		print_minimal_traceback()
	except Exception:
		# For the rest we still don't want to see stuff from this
		# file and earlier.
		print_user_part_traceback()
	return 1
Example #4
0
File: urd.py  Project: exaxorg/accelerator
def main(argv, cfg):
    """Start the urd server.

    Loads the password file and database from --path, installs the JSON
    plugin and serves forever on cfg.urd_listen (TCP tuple or unix socket).
    Populates the module globals authdict, allow_passwordless and db.
    """
    global authdict, allow_passwordless, db

    parser = ArgumentParser(prog=argv.pop(0))
    parser.add_argument(
        '--path',
        type=str,
        default='urd.db',
        help='database directory (can be relative to project directory) (default: urd.db)',
    )
    parser.add_argument('--allow-passwordless', action='store_true',
                        help='accept any pass for users not in passwd.')
    parser.add_argument('--quiet', action='store_true', help='less chatty.')
    args = parser.parse_args(argv)
    if not args.quiet:
        print('-' * 79)
        print(args)
        print()

    auth_fn = os.path.join(args.path, 'passwd')
    authdict = readauth(auth_fn)
    allow_passwordless = args.allow_passwordless
    if not authdict and not allow_passwordless:
        raise Exception(
            'No users in %r and --allow-passwordless not specified.' % (auth_fn,))
    db = DB(args.path, not args.quiet)

    bottle.install(jsonify)

    listen = cfg.urd_listen
    if not listen:
        raise Exception('urd not configured in this project')
    run_kw = dict(
        debug=False,
        reloader=False,
        quiet=args.quiet,
        server=WaitressServer,
    )
    if isinstance(listen, tuple):
        run_kw['host'], run_kw['port'] = listen
    else:
        # Unix socket: make sure nothing is already listening on it.
        from accelerator.server import check_socket
        check_socket(listen)
        run_kw['host'] = listen
        run_kw['port'] = 0
    bottle.run(**run_kw)
Example #5
0
def main(argv):
	"""Create an accelerator project directory (the "init" command).

	Writes accelerator.conf, a method directory, a workdir and a result
	directory under the chosen project directory, then (unless --no-git)
	creates a git repository for the method dir.
	"""
	from os import makedirs, listdir, chdir
	from os.path import exists, join, realpath
	from sys import version_info
	from argparse import RawTextHelpFormatter
	from accelerator.compat import ArgumentParser
	from accelerator.error import UserError
	from accelerator.extras import DotDict

	parser = ArgumentParser(
		prog=argv.pop(0),
		description=r'''
			creates an accelerator project directory.
			defaults to the current directory.
			creates accelerator.conf, a method dir, a workdir and result dir.
			both the method directory and workdir will be named <NAME>,
			"dev" by default.
		'''.replace('\t', ''),
		formatter_class=RawTextHelpFormatter,
	)
	parser.add_argument('--slices', default=None, type=int, help='override slice count detection')
	parser.add_argument('--name', default='dev', help='name of method dir and workdir, default "dev"')
	parser.add_argument('--input', default='# /some/path where you want import methods to look.', help='input directory')
	parser.add_argument('--force', action='store_true', help='go ahead even though directory is not empty, or workdir\nexists with incompatible slice count')
	parser.add_argument('--tcp', default=False, metavar='HOST/PORT', nargs='?', help='listen on TCP instead of unix sockets.\nspecify HOST (can be IP) to listen on that host\nspecify PORT to use range(PORT, PORT + 3)\nspecify both as HOST:PORT')
	parser.add_argument('--no-git', action='store_true', help='don\'t create git repository')
	parser.add_argument('directory', default='.', help='project directory to create. default "."', metavar='DIR', nargs='?')
	options = parser.parse_args(argv)

	assert options.name
	assert '/' not in options.name

	# --tcp not given at all (False, not None) means unix sockets in .socket.dir.
	if options.tcp is False:
		listen = DotDict(
			board='.socket.dir/board',
			server='.socket.dir/server',
			urd='.socket.dir/urd',
		)
	else:
		# --tcp was given, possibly without a value (None -> '').
		# Accept HOST, PORT or HOST:PORT; a bare [ipv6] address ends in ']'.
		hostport = options.tcp or ''
		if hostport.endswith(']'): # ipv6
			host, port = hostport, None
		elif ':' in hostport:
			host, port = hostport.rsplit(':', 1)
		elif hostport.isdigit():
			host, port = '', hostport
		else:
			host, port = hostport, None
		if port:
			port = int(port)
		else:
			port = find_free_ports(0x3000, 0x8000)
		# server/board/urd use three consecutive ports (see --tcp help).
		listen = DotDict(
			server='%s:%d' % (host, port,),
			board='%s:%d' % (host, port + 1,),
			urd='%s:%d' % (host, port + 2,),
		)

	# The default --input value is a comment line; a real path gets quoted.
	if not options.input.startswith('#'):
		options.input = quote(realpath(options.input))
	prefix = realpath(options.directory)
	workdir = join(prefix, 'workdirs', options.name)
	slices_conf = join(workdir, '.slices')
	try:
		with open(slices_conf, 'r') as fh:
			workdir_slices = int(fh.read())
	except IOError:
		workdir_slices = None
	# Slice count precedence: existing workdir, then --slices, then cpu count.
	if workdir_slices and options.slices is None:
		options.slices = workdir_slices
	if options.slices is None:
		from multiprocessing import cpu_count
		options.slices = cpu_count()
	if workdir_slices and workdir_slices != options.slices and not options.force:
		raise UserError('Workdir %r has %d slices, refusing to continue with %d slices' % (workdir, workdir_slices, options.slices,))

	if not options.force and exists(options.directory) and listdir(options.directory):
		raise UserError('Directory %r is not empty.' % (options.directory,))
	if not exists(options.directory):
		makedirs(options.directory)
	chdir(options.directory)
	# 0o750 keeps the sockets and the urd database from being world accessible.
	for dir_to_make in ('.socket.dir', 'urd.db',):
		if not exists(dir_to_make):
			makedirs(dir_to_make, 0o750)
	for dir_to_make in (workdir, 'results',):
		if not exists(dir_to_make):
			makedirs(dir_to_make)
	with open(slices_conf, 'w') as fh:
		fh.write('%d\n' % (options.slices,))
	# Populate the method dir: package marker, methods.conf and examples.
	method_dir = options.name
	if not exists(method_dir):
		makedirs(method_dir)
	with open(join(method_dir, '__init__.py'), 'w') as fh:
		pass
	with open(join(method_dir, 'methods.conf'), 'w') as fh:
		fh.write('example\n')
	with open(join(method_dir, 'a_example.py'), 'w') as fh:
		fh.write(a_example)
	with open(join(method_dir, 'build.py'), 'w') as fh:
		fh.write(build_script)
	with open('accelerator.conf', 'w') as fh:
		fh.write(config_template.format(
			name=quote(options.name),
			slices=options.slices,
			input=options.input,
			major=version_info.major,
			minor=version_info.minor,
			micro=version_info.micro,
			listen=DotDict({k: quote(v) for k, v in listen.items()}),
		))
	if not options.no_git:
		git(method_dir)
Example #6
0
def main():
    """Entry point for the accelerator shell.

    Sets up multiprocessing, expands user aliases (with loop protection),
    then dispatches to the chosen command. Returns the command's exit
    value, or exits directly on usage errors.
    """
    # As of python 3.8 the default start_method is 'spawn' on macOS.
    # This doesn't work for us. 'fork' is fairly unsafe on macOS,
    # but it's better than not working at all. See
    # https://bugs.python.org/issue33725
    # for more information.
    import multiprocessing
    if hasattr(multiprocessing, 'set_start_method'):
        # If possible, make the forkserver (used by database updates) pre-import everthing
        if hasattr(multiprocessing, 'set_forkserver_preload'):
            multiprocessing.set_forkserver_preload(
                ['accelerator', 'accelerator.server'])
        multiprocessing.set_start_method('fork')

    from accelerator import g
    g.running = 'shell'

    from accelerator.autoflush import AutoFlush
    main_argv, argv = split_args(sys.argv[1:])
    sys.stdout = AutoFlush(sys.stdout)
    sys.stderr = AutoFlush(sys.stderr)

    aliases = {
        'cat': 'grep ""',
    }
    aliases.update(parse_user_config() or ())
    # BUGFIX: this expansion loop could spin forever on a self-referencing
    # alias (e.g. "x = x -v"). Stop when an alias expands to a command of
    # the same name, and detect longer cycles explicitly.
    used_aliases = []
    while argv and argv[0] in aliases:
        alias = argv[0]
        try:
            expanded = shlex.split(aliases[alias])
        except ValueError as e:
            raise ValueError('Failed to expand alias %s (%r): %s' % (
                alias,
                aliases[alias],
                e,
            ))
        more_main_argv, argv = split_args(expanded + argv[1:])
        main_argv.extend(more_main_argv)
        if expanded and alias == expanded[0]:
            break
        used_aliases.append(alias)
        if alias in used_aliases[:-1]:
            raise ValueError('Alias loop: %r' % (used_aliases,))

    # Build the help epilog: one line per command, then the aliases.
    epilog = ['commands:', '']
    cmdlen = max(len(cmd) for cmd in COMMANDS)
    template = '  %%%ds  %%s' % (cmdlen, )
    for cmd, func in sorted(COMMANDS.items()):
        epilog.append(template % (
            cmd,
            func.help,
        ))
    epilog.append('')
    epilog.append('aliases:')
    epilog.extend('%s = %s' % item for item in sorted(aliases.items()))
    epilog.append('')
    epilog.append('use %(prog)s <command> --help for <command> usage')
    parser = ArgumentParser(
        usage='%(prog)s [--config CONFIG_FILE] command [args]',
        epilog='\n'.join(epilog),
        formatter_class=RawDescriptionHelpFormatter,
    )
    parser.add_argument('--config',
                        metavar='CONFIG_FILE',
                        help='configuration file')
    parser.add_argument('--version',
                        action='store_true',
                        help='alias for the version command')
    args = parser.parse_args(main_argv)
    if args.version:
        sys.exit(cmd_version(()))
    args.command = argv.pop(0) if argv else None
    if args.command not in COMMANDS:
        parser.print_help(file=sys.stderr)
        print(file=sys.stderr)
        if args.command is not None:
            print('Unknown command "%s"' % (args.command, ), file=sys.stderr)
        sys.exit(2)
    config_fn = args.config
    if args.command == 'init':
        # init creates the config file, so don't try to load one.
        config_fn = False
    cmd = COMMANDS[args.command]
    debug_cmd = getattr(cmd, 'is_debug', False)
    try:
        setup(config_fn, debug_cmd)
        # Prefix argv[0] so the command's own usage shows "accel <command>".
        argv.insert(0, '%s %s' % (
            basename(sys.argv[0]),
            args.command,
        ))
        return cmd(argv)
    except UserError as e:
        print(e, file=sys.stderr)
        return 1
    except IOError as e:
        if e.errno == errno.EPIPE and debug_cmd:
            # Broken pipe from e.g. "| head" is not an error for debug commands.
            return
        else:
            raise
Example #7
0
def main(argv, config):
    """Start the accelerator server.

    Launches the board server (separate process), the statmsg sink and
    optionally a local urd (threads), prints a configuration summary and
    then serves HTTP requests forever on config.listen.
    """
    g.running = 'server'

    parser = ArgumentParser(prog=argv.pop(0))
    parser.add_argument('--debug', action='store_true')
    parser.add_argument(
        '--debuggable',
        action='store_true',
        help=
        'make breakpoint() work in methods. note that this makes a failing method kill the whole server.'
    )
    options = parser.parse_args(argv)

    config.debuggable = options.debuggable

    # all forks belong to the same happy family
    try:
        os.setpgrp()
    except OSError:
        print(
            "Failed to create process group - there is probably already one (daemontools).",
            file=sys.stderr)

    # Set a low (but not too low) open file limit to make
    # dispatch.update_valid_fds faster.
    # The runners will set the highest limit they can
    # before actually running any methods.
    r1, r2 = resource.getrlimit(resource.RLIMIT_NOFILE)
    r1 = min(r1, r2, 1024)
    resource.setrlimit(resource.RLIMIT_NOFILE, (r1, r2))

    # Start the board-server in a separate process so it can't interfere.
    # Even if it dies we don't care.
    try:
        if not isinstance(config.board_listen, tuple):
            # Don't bother if something is already listening.
            check_socket(config.board_listen)
        Process(target=board.run, args=(config, ), name='board-server').start()
    except Exception:
        # Best effort only - the server is useful without a board.
        pass

    iowrapper.main()

    # setup statmsg sink and tell address using ENV
    statmsg_rd, statmsg_wr = socket.socketpair(socket.AF_UNIX,
                                               socket.SOCK_DGRAM)
    os.environ['BD_STATUS_FD'] = str(statmsg_wr.fileno())

    def buf_up(fh, opt):
        # Raise the socket buffer (send or receive, per opt) to 256 KiB.
        sock = socket.fromfd(fh.fileno(), socket.AF_UNIX, socket.SOCK_DGRAM)
        sock.setsockopt(socket.SOL_SOCKET, opt, 256 * 1024)
        # does not close fh, because fromfd dups the fd (but not the underlying socket)
        sock.close()

    buf_up(statmsg_wr, socket.SO_SNDBUF)
    buf_up(statmsg_rd, socket.SO_RCVBUF)

    t = DeadlyThread(target=statmsg_sink,
                     args=(statmsg_rd, ),
                     name="statmsg sink")
    t.daemon = True
    t.start()

    # do all main-stuff, i.e. run server
    sys.stdout = autoflush.AutoFlush(sys.stdout)
    sys.stderr = autoflush.AutoFlush(sys.stderr)
    atexit.register(exitfunction)
    signal.signal(signal.SIGTERM, exitfunction)
    signal.signal(signal.SIGINT, exitfunction)

    # SIGUSR1 (and SIGINFO where it exists) triggers siginfo; make sure it
    # doesn't interrupt syscalls and is unblocked.
    signal.signal(signal.SIGUSR1, siginfo)
    signal.siginterrupt(signal.SIGUSR1, False)
    if hasattr(signal, 'pthread_sigmask'):
        signal.pthread_sigmask(signal.SIG_UNBLOCK, {signal.SIGUSR1})
    if hasattr(signal, 'SIGINFO'):
        signal.signal(signal.SIGINFO, siginfo)
        signal.siginterrupt(signal.SIGINFO, False)

    # A (host, port) tuple means TCP; anything else is a unix socket path.
    if isinstance(config.listen, tuple):
        server = ThreadedHTTPServer(config.listen, XtdHandler)
    else:
        check_socket(config.listen)
        # We want the socket to be world writeable, protect it with dir permissions.
        u = os.umask(0)
        server = ThreadedUnixHTTPServer(config.listen, XtdHandler)
        os.umask(u)

    if config.get('urd_local'):
        # Run urd inside this process (as a thread) when configured locally.
        from accelerator import urd
        t = DeadlyThread(target=urd.main,
                         args=(['urd', '--quiet',
                                '--allow-passwordless'], config),
                         name='urd')
        t.daemon = True
        t.start()

    ctrl = control.Main(config, options, config.url)
    print()
    ctrl.print_workdirs()
    print()

    XtdHandler.ctrl = ctrl
    job_tracking[None].workdir = ctrl.target_workdir

    # Print a summary of configured directories and listen addresses.
    for n in (
            "project_directory",
            "result_directory",
            "input_directory",
    ):
        v = config.get(n)
        n = n.replace("_", " ")
        print("%17s: %s" % (
            n,
            v,
        ))
    for n in (
            "board",
            "urd",
    ):
        v = config.get(n + '_listen')
        if v and not config.get(n + '_local', True):
            extra = ' (remote)'
        else:
            extra = ''
        print("%17s: %s%s" % (
            n,
            v,
            extra,
        ))
    print()

    print("Serving on %s\n" % (config.listen, ), file=sys.stderr)
    server.serve_forever()
Example #8
0
def main():
    """Entry point for the accelerator shell.

    Blocks SIGUSR1, sets up multiprocessing and colours, expands user
    aliases (with loop protection), then dispatches to the chosen command.
    Returns the command's exit value, or exits directly on usage errors.
    """
    # Several commands use SIGUSR1 which (naturally...) defaults to killing the
    # process, so start by blocking that to minimise the race time.
    if hasattr(signal, 'pthread_sigmask'):
        signal.pthread_sigmask(signal.SIG_BLOCK, {signal.SIGUSR1})
    else:
        # Or if we can't block it, just ignore it.
        signal.signal(signal.SIGUSR1, signal.SIG_IGN)

    # As of python 3.8 the default start_method is 'spawn' on macOS.
    # This doesn't work for us. 'fork' is fairly unsafe on macOS,
    # but it's better than not working at all. See
    # https://bugs.python.org/issue33725
    # for more information.
    import multiprocessing
    if hasattr(multiprocessing, 'set_start_method'):
        # If possible, make the forkserver (used by database updates) pre-import everthing
        if hasattr(multiprocessing, 'set_forkserver_preload'):
            multiprocessing.set_forkserver_preload(
                ['accelerator', 'accelerator.server'])
        multiprocessing.set_start_method('fork')

    from accelerator import g
    g.running = 'shell'

    from accelerator.autoflush import AutoFlush
    main_argv, argv = split_args(sys.argv[1:])
    sys.stdout = AutoFlush(sys.stdout)
    sys.stderr = AutoFlush(sys.stderr)

    # configuration defaults
    # (parse_user_config gets both dicts and can override these entries)
    aliases = {
        'cat': 'grep -e ""',
    }
    colour_d = {
        'warning': ('RED', ),
        'highlight': ('BOLD', ),
        'grep/highlight': ('RED', ),
        'info': ('BRIGHTBLUE', ),
        'infohighlight': (
            'BOLD',
            'BRIGHTBLUE',
        ),
        'separator': (
            'CYAN',
            'UNDERLINE',
        ),
        'header': (
            'BRIGHTBLUE',
            'BOLD',
        ),
    }
    parse_user_config(aliases, colour_d)
    colour._names.update(colour_d)

    # Expand aliases repeatedly, guarding against cycles.
    used_aliases = []
    while argv and argv[0] in aliases:
        alias = argv[0]
        if alias == 'noalias':  # save the user from itself
            break
        try:
            expanded = shlex.split(aliases[alias])
        except ValueError as e:
            raise ValueError('Failed to expand alias %s (%r): %s' % (
                argv[0],
                aliases[argv[0]],
                e,
            ))
        more_main_argv, argv = split_args(expanded + argv[1:])
        main_argv.extend(more_main_argv)
        # An alias may expand to a command with the same name
        # (e.g. grep = "grep -i"); stop so it isn't expanded again.
        if expanded and alias == expanded[0]:
            break
        used_aliases.append(alias)
        if alias in used_aliases[:-1]:
            raise ValueError('Alias loop: %r' % (used_aliases, ))

    # "noalias" is only a marker to stop expansion; drop it from argv.
    while argv and argv[0] == 'noalias':
        argv.pop(0)

    # Build the help epilog: one line per command, then the aliases.
    epilog = ['commands:', '']
    cmdlen = max(len(cmd) for cmd in COMMANDS)
    template = '  %%%ds  %%s' % (cmdlen, )
    for cmd, func in sorted(COMMANDS.items()):
        epilog.append(template % (
            cmd,
            func.help,
        ))
    epilog.append('')
    epilog.append('aliases:')
    epilog.extend('  %s = %s' % item for item in sorted(aliases.items()))
    epilog.append('')
    epilog.append('use "' +
                  colour('%(prog)s <command> --help', 'help/highlight') +
                  '" for <command> usage')
    epilog.append('try "' + colour('%(prog)s intro', 'help/highlight') +
                  '" for an introduction')
    parser = ArgumentParser(
        usage='%(prog)s [--config CONFIG_FILE] command [args]',
        epilog='\n'.join(epilog),
        formatter_class=RawDescriptionHelpFormatter,
    )
    parser.add_argument('--config',
                        metavar='CONFIG_FILE',
                        help='configuration file')
    parser.add_argument('--version',
                        action='store_true',
                        help='alias for the version command')
    args = parser.parse_args(main_argv)
    if args.version:
        sys.exit(cmd_version(()))
    args.command = argv.pop(0) if argv else None
    if args.command not in COMMANDS:
        parser.print_help(file=sys.stderr)
        if args.command is not None:
            print(file=sys.stderr)
            print('Unknown command "%s"' % (args.command, ), file=sys.stderr)
        sys.exit(2)
    config_fn = args.config
    # These commands work without (or create) the project config.
    if args.command in (
            'init',
            'intro',
            'version',
    ):
        config_fn = False
    cmd = COMMANDS[args.command]
    debug_cmd = getattr(cmd, 'is_debug', False)
    try:
        setup(config_fn, debug_cmd)
        # Prefix argv[0] so the command's own usage shows "accel <command>".
        argv.insert(0, '%s %s' % (
            basename(sys.argv[0]),
            args.command,
        ))
        return cmd(argv)
    except UserError as e:
        print(e, file=sys.stderr)
        return 1
    except OSError as e:
        if e.errno == errno.EPIPE:
            # Broken pipe (e.g. "| head") is a quiet failure, not a crash.
            return 1
        else:
            raise
    except KeyboardInterrupt:
        # Exiting with KeyboardInterrupt causes python to print a traceback.
        # We don't want that, but we do want to exit from SIGINT (so the
        # calling process can know that happened).
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        os.kill(os.getpid(), signal.SIGINT)
        # If that didn't work let's re-raise the KeyboardInterrupt.
        raise
Example #9
0
File: init.py  Project: drougge/accelerator
def main(argv):
	"""Create an accelerator project directory (older "init" command).

	Writes accelerator.conf, a method directory, a workdir and a result
	directory under the chosen project directory. This version has no
	--tcp/--no-git options (unix sockets only, no git repo).
	"""
	from os import makedirs, listdir, chdir
	from os.path import exists, join, realpath
	from sys import version_info
	from argparse import RawDescriptionHelpFormatter
	from accelerator.compat import ArgumentParser
	from accelerator.error import UserError

	parser = ArgumentParser(
		prog=argv.pop(0),
		description=r'''
			creates an accelerator project directory.
			defaults to the current directory.
			creates accelerator.conf, a method dir, a workdir and result dir.
			both the method directory and workdir will be named <NAME>,
			"dev" by default.
		'''.replace('\t', ''),
		formatter_class=RawDescriptionHelpFormatter,
	)
	parser.add_argument('--slices', default=None, type=int, help='override slice count detection')
	parser.add_argument('--name', default='dev', help='name of method dir and workdir, default "dev"')
	parser.add_argument('--input', default='# /some/path where you want import methods to look.', help='input directory')
	parser.add_argument('--force', action='store_true', help='go ahead even though directory is not empty, or workdir exists with incompatible slice count')
	parser.add_argument('directory', default='.', help='project directory to create. default "."', metavar='DIR', nargs='?')
	options = parser.parse_args(argv)

	assert options.name
	assert '/' not in options.name
	# The default --input value is a comment line; a real path gets quoted.
	if not options.input.startswith('#'):
		options.input = quote(realpath(options.input))
	prefix = realpath(options.directory)
	workdir = join(prefix, 'workdirs', options.name)
	slices_conf = join(workdir, '.slices')
	try:
		with open(slices_conf, 'r') as fh:
			workdir_slices = int(fh.read())
	except IOError:
		workdir_slices = None
	# Slice count precedence: existing workdir, then --slices, then cpu count.
	if workdir_slices and options.slices is None:
		options.slices = workdir_slices
	if options.slices is None:
		from multiprocessing import cpu_count
		options.slices = cpu_count()
	if workdir_slices and workdir_slices != options.slices and not options.force:
		raise UserError('Workdir %r has %d slices, refusing to continue with %d slices' % (workdir, workdir_slices, options.slices,))

	if not options.force and exists(options.directory) and listdir(options.directory):
		raise UserError('Directory %r is not empty.' % (options.directory,))
	if not exists(options.directory):
		makedirs(options.directory)
	chdir(options.directory)
	# 0o750 keeps the sockets and the urd database from being world accessible.
	for dir_to_make in ('.socket.dir', 'urd.db',):
		if not exists(dir_to_make):
			makedirs(dir_to_make, 0o750)
	for dir_to_make in (workdir, 'results',):
		if not exists(dir_to_make):
			makedirs(dir_to_make)
	with open(slices_conf, 'w') as fh:
		fh.write('%d\n' % (options.slices,))
	# Populate the method dir: package marker, methods.conf and examples.
	method_dir = options.name
	if not exists(method_dir):
		makedirs(method_dir)
	with open(join(method_dir, '__init__.py'), 'w') as fh:
		pass
	with open(join(method_dir, 'methods.conf'), 'w') as fh:
		fh.write('example\n')
	with open(join(method_dir, 'a_example.py'), 'w') as fh:
		fh.write(a_example)
	with open(join(method_dir, 'build.py'), 'w') as fh:
		fh.write(build_script)
	with open('accelerator.conf', 'w') as fh:
		fh.write(config_template.format(
			name=quote(options.name),
			slices=options.slices,
			input=options.input,
			major=version_info.major,
			minor=version_info.minor,
			micro=version_info.micro,
		))