Пример #1
0
def main():
    """Run a single sleep task on Mesos through a timeout-wrapping executor.

    Loads the mesos plugin, builds a mesos_task executor from the CLI
    arguments, wraps it with the 'timeout' provider and synchronously runs
    one container task, printing the result.
    """
    args = parse_args()

    processor = TaskProcessor()
    processor.load_plugin(provider_module='task_processing.plugins.mesos')

    mesos_config = {
        'secret': args.secret,
        'mesos_address': args.master,
        'pool': args.pool,
        'role': args.role,
    }
    mesos_executor = processor.executor_from_config(
        provider='mesos_task', provider_config=mesos_config)

    # Chain the 'timeout' provider in front of the Mesos executor.
    executor = processor.executor_from_config(
        provider='timeout',
        provider_config={'downstream_executor': mesos_executor})

    runner = Sync(executor=executor)
    TaskConfig = mesos_executor.TASK_CONFIG_INTERFACE
    config = TaskConfig(image='docker-dev.yelpcorp.com/dumb-busybox',
                        cmd='exec dumb-init /bin/sleep 30',
                        timeout=10)
    print(runner.run(config))

    runner.stop()
Пример #2
0
def main():
    """Run a counting loop task on Mesos through a 'logging'-wrapped executor.

    Loads the mesos plugin, builds a mesos_task executor from the CLI
    arguments, chains the 'logging' provider in front of it and runs one
    task synchronously, printing the result.
    """
    args = parse_args()

    processor = TaskProcessor()
    processor.load_plugin(provider_module='task_processing.plugins.mesos')

    mesos_config = {
        'secret': args.secret,
        'mesos_address': args.master,
        'pool': args.pool,
        'role': args.role,
    }
    mesos_executor = processor.executor_from_config(
        provider='mesos_task', provider_config=mesos_config)

    # Chain the 'logging' provider in front of the Mesos executor.
    executor = processor.executor_from_config(
        provider='logging',
        provider_config={'downstream_executor': mesos_executor})

    runner = Sync(executor=executor)
    TaskConfig = mesos_executor.TASK_CONFIG_INTERFACE
    config = TaskConfig(
        image="ubuntu:14.04",
        cmd="bash -c 'for i in $(seq 1 5); do echo $i&&sleep 10; done'")
    print(runner.run(config))

    runner.stop()
def main():
  """Archive a payload plus a hand-written .isolated file, then run it on
  Swarming via `swarming.py run`.

  Returns 0 on success, otherwise the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    common.note(
        'Archiving directory \'payload\' to %s' % options.isolate_server)
    payload_isolated_sha1 = common.capture(
        [
          'isolateserver.py',
          'archive',
          '--isolate-server', options.isolate_server,
          'payload',
        ]).split()[0]

    common.note(
        'Archiving custom .isolated file to %s' % options.isolate_server)
    handle, isolated = tempfile.mkstemp(
        prefix=u'hello_world', suffix=u'.isolated')
    os.close(handle)
    try:
      data = {
        'algo': 'sha-1',
        'command': ['python', 'hello_world.py', 'Custom'],
        'includes': [payload_isolated_sha1],
        'version': '1.0',
      }
      # BUG FIX: open in text mode — json.dump() writes str, which raises
      # TypeError on a file opened 'wb' under Python 3.
      with open(isolated, 'w') as f:
        json.dump(data, f, sort_keys=True, separators=(',', ':'))
      isolated_sha1 = common.capture(
          [
            'isolateserver.py',
            'archive',
            '--isolate-server', options.isolate_server,
            isolated,
          ]).split()[0]
    finally:
      common.note('Deleting temporary file, it is not necessary anymore.')
      os.remove(isolated)

    # Now trigger as usual. You could look at run_example_swarming_involved
    # for the involved way but use the short way here.
    common.note('Running %s on %s' % (isolated_sha1, options.swarming))
    cmd = [
      'swarming.py',
      'run',
      '--swarming', options.swarming,
      '--isolate-server', options.isolate_server,
      '--dimension', 'os', options.swarming_os,
      '--dimension', 'pool', 'default',
      '--task-name', options.task_name,
      isolated_sha1,
    ]
    if options.idempotent:
      cmd.append('--idempotent')
    if options.priority is not None:
      cmd.extend(('--priority', str(options.priority)))
    common.run(cmd, options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
Пример #4
0
def main():
    """Either print a saved callgraph or re-launch this script inside gdb.

    With --input, renders a previously captured profile; with --pid, runs
    gdb, has it source this file, and invokes the gdbpmp command with the
    original command-line arguments forwarded.
    """
    ctx = common.parse_args()
    if ctx.input:
        # Offline mode: load and render a previously captured profile.
        threads = common.load_threads(ctx.input)
        render = (common.print_inverted_callgraph if ctx.invert
                  else common.print_callgraph)
        render(threads, ctx.threshold)
    elif ctx.pid:
        # Live mode: run under gdb, sourcing this very file.
        script = os.path.realpath(__file__)
        forwarded = ' '.join(sys.argv[1:])
        gdb_cmd = [
            ctx.gdb_path, "-q",
            "--ex", "source %s" % script,
            "--ex", "gdbpmp %s" % forwarded,
        ]
        proc = subprocess.Popen(gdb_cmd)

        try:
            while proc.poll() is None:
                time.sleep(0.1)
        except KeyboardInterrupt:
            # Forward Ctrl-C to gdb, then wait for it to exit.
            proc.send_signal(signal.SIGINT)
            while proc.poll() is None:
                time.sleep(0.1)
def main():
  """Archive a payload plus a hand-written .isolated file, then run it on
  Swarming via `swarming.py run`.

  Returns 0 on success, otherwise the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    common.note(
        'Archiving directory \'payload\' to %s' % options.isolate_server)
    payload_isolated_sha1 = common.capture(
        [
          'isolateserver.py',
          'archive',
          '--isolate-server', options.isolate_server,
          'payload',
        ]).split()[0]

    common.note(
        'Archiving custom .isolated file to %s' % options.isolate_server)
    handle, isolated = tempfile.mkstemp(
        prefix=u'hello_world', suffix=u'.isolated')
    os.close(handle)
    try:
      data = {
        'algo': 'sha-1',
        'command': ['python', 'hello_world.py', 'Custom'],
        'includes': [payload_isolated_sha1],
        'version': '1.0',
      }
      # BUG FIX: open in text mode — json.dump() writes str, which raises
      # TypeError on a file opened 'wb' under Python 3.
      with open(isolated, 'w') as f:
        json.dump(data, f, sort_keys=True, separators=(',', ':'))
      isolated_sha1 = common.capture(
          [
            'isolateserver.py',
            'archive',
            '--isolate-server', options.isolate_server,
            isolated,
          ]).split()[0]
    finally:
      common.note('Deleting temporary file, it is not necessary anymore.')
      os.remove(isolated)

    # Now trigger as usual. You could look at run_example_swarming_involved
    # for the involved way but use the short way here.
    common.note('Running %s on %s' % (isolated_sha1, options.swarming))
    cmd = [
      'swarming.py',
      'run',
      '--swarming', options.swarming,
      '--isolate-server', options.isolate_server,
      '--dimension', 'os', options.swarming_os,
      '--dimension', 'pool', 'default',
      '--task-name', options.task_name,
      isolated_sha1,
    ]
    if options.idempotent:
      cmd.append('--idempotent')
    if options.priority is not None:
      cmd.extend(('--priority', str(options.priority)))
    common.run(cmd, options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
Пример #6
0
def main():
    """Launch two trivial Mesos tasks asynchronously and wait for them.

    Returns 0 if all launched tasks reached a terminal state within the
    polling window, 1 otherwise.
    """
    args = parse_args()

    processor = TaskProcessor()
    processor.load_plugin(provider_module='task_processing.plugins.mesos')
    executor = processor.executor_from_config(
        provider='mesos_task',
        provider_config={
            'secret': args.secret,
            'mesos_address': args.master,
            'role': args.role,
        })

    counter = Counter()
    handlers = [
        EventHandler(predicate=lambda x: x.terminal,
                     cb=counter.process_event),
    ]
    runner = Async(executor, handlers)

    TaskConfig = executor.TASK_CONFIG_INTERFACE
    tasks_to_launch = 2
    for _ in range(tasks_to_launch):
        runner.run(TaskConfig(image='busybox', cmd='/bin/true'))

    # Poll a bounded number of times for all tasks to terminate.
    for _ in range(5):
        print('terminated {} tasks'.format(counter.terminated))
        if counter.terminated >= tasks_to_launch:
            break
        time.sleep(2)

    runner.stop()
    return 0 if counter.terminated >= tasks_to_launch else 1
Пример #7
0
def main():
    """Convert biosequence files between formats.

    For each input file the input format comes from the options or is
    guessed from the file extension; the converted file is written next to
    it with the extension appropriate for the output format.
    """
    out_fmt, infiles, opts = common.parse_args('biosequence')

    for in_path in infiles:
        dir, base, ext = common.dir_base_ext(in_path)
        in_fmt = (opts.input_format or EXT_TO_FORMAT.get(ext, '')).lower()
        assert (in_fmt), "no known input format specified"

        # calculate output format and name
        out_path = common.make_out_path(
            dir, base, opts.output_extension or FORMAT_TO_EXT[out_fmt])

        # open & read infile
        in_hndl = open(in_path, 'rb')
        in_seqs = [x for x in SeqIO.parse(in_hndl, in_fmt)]
        in_hndl.close()
        # BUG FIX: the message referenced an undefined name `file_name`,
        # which raised NameError whenever this assert fired.
        assert (in_seqs), \
            '''No sequences read from %s. Perhaps the file is not in %s format.''' % (in_path, in_fmt)

        # write out
        out_hndl = open(out_path, 'wb')
        if opts.seqtype:
            for s in in_seqs:
                s.alphabet = opts.seqtype
        if out_fmt in ['nexus']:
            # need to hack to handle this crap
            from Bio.Align import MultipleSeqAlignment
            aln = MultipleSeqAlignment(
                in_seqs, alphabet=opts.seqtype or BIOSEQ_ALPHABET_PROTEIN)
            AlignIO.write(aln, out_hndl, out_fmt)
        else:
            SeqIO.write(in_seqs, out_hndl, out_fmt)
        out_hndl.close()
Пример #8
0
def main():
    """Convert biosequence files between formats.

    For each input file the input format comes from the options or is
    guessed from the file extension; the converted file is written next to
    it with the extension appropriate for the output format.
    """
    out_fmt, infiles, opts = common.parse_args('biosequence')

    for in_path in infiles:
        dir, base, ext = common.dir_base_ext(in_path)
        in_fmt = (opts.input_format or EXT_TO_FORMAT.get(ext, '')).lower()
        assert (in_fmt), "no known input format specified"

        # calculate output format and name
        out_path = common.make_out_path(
            dir, base, opts.output_extension or FORMAT_TO_EXT[out_fmt])

        # open & read infile
        in_hndl = open(in_path, 'rb')
        in_seqs = [x for x in SeqIO.parse(in_hndl, in_fmt)]
        in_hndl.close()
        # BUG FIX: the message referenced an undefined name `file_name`,
        # which raised NameError whenever this assert fired.
        assert (in_seqs), \
         '''No sequences read from %s. Perhaps the file is not in %s format.''' % (in_path, in_fmt)

        # write out
        out_hndl = open(out_path, 'wb')
        if opts.seqtype:
            for s in in_seqs:
                s.alphabet = opts.seqtype
        if out_fmt in ['nexus']:
            # need to hack to handle this crap
            from Bio.Align import MultipleSeqAlignment
            aln = MultipleSeqAlignment(in_seqs,
                                       alphabet=opts.seqtype
                                       or BIOSEQ_ALPHABET_PROTEIN)
            AlignIO.write(aln, out_hndl, out_fmt)
        else:
            SeqIO.write(in_seqs, out_hndl, out_fmt)
        out_hndl.close()
Пример #9
0
def main():
    """Convert alignment files between formats.

    For each input file the input format comes from the options or is
    guessed from the file extension; the converted file is written next to
    it with the extension appropriate for the output format.
    """
    out_fmt, infiles, opts = common.parse_args('alignment')

    for in_path in infiles:
        dir, base, ext = common.dir_base_ext(in_path)
        in_fmt = (opts.input_format or EXT_TO_FORMAT.get(ext, '')).lower()
        assert (in_fmt), "no known input format specified"

        # calculate output format and name
        out_path = common.make_out_path(
            dir, base, opts.output_extension or FORMAT_TO_EXT[out_fmt])

        # open & read infile
        in_hndl = open(in_path, 'rb')
        in_alns = [x for x in AlignIO.parse(in_hndl, in_fmt)]
        in_hndl.close()
        # BUG FIX: the message referenced an undefined name `file_name`,
        # which raised NameError whenever this assert fired.
        assert (in_alns), \
         '''No alignments read from %s. Perhaps the file is not in %s format.''' % (in_path, in_fmt)

        # write out
        out_hndl = open(out_path, 'wb')
        if opts.seqtype:
            for s in in_alns:
                s._alphabet = opts.seqtype
        AlignIO.write(in_alns, out_hndl, out_fmt)
        out_hndl.close()
Пример #10
0
def main():
    """Archive a script to the isolate server, then fetch and run it in a
    temporary directory via run_isolated.py.

    Returns 0 on success, otherwise the failing subprocess's exit code.
    """
    args = common.parse_args(use_isolate_server=True, use_swarming=False)
    tempdir = unicode(tempfile.mkdtemp(prefix=u'hello_world'))
    try:
        isolated_sha1 = common.archive(
            tempdir, args.isolate_server, args.verbose, args.which)

        common.note(
            'Downloading from %s and running in a temporary directory' %
            args.isolate_server)
        isolate_cache = os.path.join(tempdir, u'cachei')
        named_cache_root = os.path.join(tempdir, u'cachen')
        cmd = [
            'run_isolated.py',
            '--cache',
            isolate_cache.encode('utf-8'),
            '--named-cache-root',
            named_cache_root.encode('utf-8'),
            '--isolate-server',
            args.isolate_server,
            '--isolated',
            isolated_sha1,
            '--no-log',
            '--',
            args.which + u'.py',
            'Dear 💩',
            '${ISOLATED_OUTDIR}',
        ]
        common.run(cmd, args.verbose)
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(tempdir)
Пример #11
0
def main():
    """Run one trivial Mesos task through the Promise runner.

    Returns 0 if the task result reports success, 1 otherwise.
    """
    args = parse_args()
    processor = TaskProcessor()
    processor.load_plugin(provider_module='task_processing.plugins.mesos')
    mesos_config = {
        'secret': args.secret,
        'mesos_address': args.master,
        'pool': args.pool,
        'role': args.role,
    }
    executor = processor.executor_from_config(
        provider='mesos_task', provider_config=mesos_config)

    TaskConfig = executor.TASK_CONFIG_INTERFACE
    task_config = TaskConfig(image="busybox", cmd='/bin/true')
    # This only works on agents that have added mesos as a containerizer
    # task_config = TaskConfig(containerizer='MESOS', cmd='/bin/true')

    with ThreadPoolExecutor(max_workers=2) as futures_executor:
        runner = Promise(executor, futures_executor)
        task_future = runner.run(task_config)
        wait([task_future])
        result = task_future.result()
        print(result)
        print(result.raw)
        runner.stop()

    return 0 if result.success else 1
Пример #12
0
def main():
    """Dispatch the CLI action named by the first positional argument.

    Prints the available actions when none is given, an error for an
    unknown action, and a login reminder when the action needs a token.
    """
    available_actions = {
        'register-user': register_user_action,
        'login': login_user_action,
        'create-list': create_list_action,
        'view-list': view_list_action,
        'delete-list': delete_list_action,
        'add-list-item': add_list_item_action,
        'update-list-item': update_list_item_action,
        'delete-list-item': delete_list_item_action,
    }
    try:
        action = parse_args(1)[0]
    except ValidationError:
        # No action supplied: list what is available and bail out.
        print("Missing action:")
        for name in available_actions:
            print(name)
        return

    handler = available_actions.get(action)
    if handler is None:
        print("Invalid argument.")
        return

    # call action
    try:
        handler()
    except TokenError:
        print("You must login before this action")
Пример #13
0
def main():
    """Produce all standard plots, fanning plotting modules out over a
    process pool.

    The pool size defaults to one process per plotting module and can be
    overridden via the SHARK_PLOT_PROCS environment variable.
    """
    model_dir, output_dir, redshift_table, subvolumes, obs_dir = common.parse_args()
    args_minimal = (model_dir, output_dir, redshift_table, subvolumes)
    args_with_obsdir = args_minimal + (obs_dir,)

    # Modules and which arguments they take
    args_and_mods = {
        args_minimal: (hothalo, smhm),
        args_with_obsdir: (coldgas, global_quantities, hmf, sizes, smf),
    }

    # sum() is the idiomatic form of the old functools.reduce(add) chain.
    n_mods = sum(len(mods) for mods in args_and_mods.values())
    n_procs = int(os.environ.get('SHARK_PLOT_PROCS', n_mods))
    print("Using %d processes to produce all plots" % n_procs)
    pool = multiprocessing.Pool(n_procs)

    # Go, go, go!
    futures = []
    for args, mods in args_and_mods.items():
        futures += [pool.apply_async(m.main, args) for m in mods]

    # Wait for all results to finish, then shut the pool down cleanly
    # (the original leaked the worker processes).
    for f in futures:
        f.get()
    pool.close()
    pool.join()
Пример #14
0
def main():
    """Archive to a local hashtable directory and run the executable from
    it — no isolate server or Swarming involved.

    Returns 0 on success, otherwise the failing subprocess's exit code.
    """
    options = common.parse_args(use_isolate_server=False, use_swarming=False)
    tempdir = tempfile.mkdtemp(prefix='hello_world')
    try:
        # Work entirely inside a temporary directory so the current one is
        # not littered with:
        # - hello_world.isolated
        # - hello_world.isolated.state
        # - cache/
        # - hashtable/
        cachedir = os.path.join(tempdir, 'cache')
        hashtabledir = os.path.join(tempdir, 'hashtable')
        isolateddir = os.path.join(tempdir, 'isolated')
        isolated = os.path.join(isolateddir, 'hello_world.isolated')
        os.mkdir(isolateddir)

        common.note('Archiving to %s' % hashtabledir)
        # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
        archive_cmd = [
            'isolate.py', 'hashtable',
            '--isolate', os.path.join('payload', 'hello_world.isolate'),
            '--isolated', isolated,
            '--outdir', hashtabledir,
            '--config-variable', 'OS', 'Yours',
        ]
        common.run(archive_cmd, options.verbose)

        common.note(
            'Running the executable in a temporary directory from the hash table'
        )
        with open(isolated, 'rb') as f:
            isolated_sha1 = hashlib.sha1(f.read()).hexdigest()
        run_cmd = [
            'run_isolated.py',
            '--cache', cachedir,
            '--indir', hashtabledir,
            '--hash', isolated_sha1,
            # TODO(maruel): Should not require this.
            '--namespace', 'default',
            '--no-log',
        ]
        common.run(run_cmd, options.verbose)
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(tempdir)
Пример #15
0
def main():
    """Trigger a two-shard hello_world task on Swarming, collect the
    results and print every collected output file.

    Returns 0 on success, otherwise the failing subprocess's exit code.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    try:
        tempdir = tempfile.mkdtemp(prefix=u'hello_world')
        try:
            _, hashval = common.isolate(tempdir, options.isolate_server,
                                        options.swarming_os, options.verbose)

            json_file = os.path.join(tempdir, 'task.json')
            common.note('Running on %s' % options.swarming)
            trigger_cmd = [
                'swarming.py', 'trigger',
                '--swarming', options.swarming,
                '--isolate-server', options.isolate_server,
                '--dimension', 'os', options.swarming_os,
                '--task-name', options.task_name,
                '--dump-json', json_file,
                '--isolated', hashval,
                '--shards', '2',
            ]
            if options.idempotent:
                trigger_cmd.append('--idempotent')
            if options.priority is not None:
                trigger_cmd.extend(('--priority', str(options.priority)))
            trigger_cmd.extend(('--', '${ISOLATED_OUTDIR}'))
            common.run(trigger_cmd, options.verbose)

            common.note('Getting results from %s' % options.swarming)
            collect_cmd = [
                'swarming.py', 'collect',
                '--swarming', options.swarming,
                '--json', json_file,
                '--task-output-dir', 'example_result',
            ]
            common.run(collect_cmd, options.verbose)
            # Dump every collected output file to stdout.
            for root, _, files in os.walk('example_result'):
                for name in files:
                    path = os.path.join(root, name)
                    with open(path, 'rb') as f:
                        print('%s content:' % path)
                        print(f.read())
            return 0
        finally:
            shutil.rmtree(tempdir)
    except subprocess.CalledProcessError as e:
        return e.returncode
Пример #16
0
def main():
  """Archive hello_world, trigger it on Swarming, and collect the results.

  Returns 0 on success, otherwise the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    tempdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
      # All the files are put in a temporary directory. This is optional and
      # simply done so the current directory doesn't have the following files
      # created:
      # - hello_world.isolated
      # - hello_world.isolated.state
      isolated = os.path.join(tempdir, 'hello_world.isolated')
      common.note('Archiving to %s' % options.isolate_server)
      common.run(
          [
            'isolate.py',
            'archive',
            '--isolate', os.path.join('payload', 'hello_world.isolate'),
            '--isolated', isolated,
            '--isolate-server', options.isolate_server,
            '--config-variable', 'OS', options.swarming_os,
          ], options.verbose)
      with open(isolated, 'rb') as f:
        hashval = hashlib.sha1(f.read()).hexdigest()

      json_file = os.path.join(tempdir, 'task.json')
      common.note('Running on %s' % options.swarming)
      cmd = [
        'swarming.py',
        'trigger',
        '--swarming', options.swarming,
        '--isolate-server', options.isolate_server,
        '--dimension', 'os', options.swarming_os,
        '--task-name', options.task_name,
        '--dump-json', json_file,
        '--isolated', hashval,
      ]
      if options.idempotent:
        cmd.append('--idempotent')
      if options.priority is not None:
        cmd.extend(('--priority', str(options.priority)))
      common.run(cmd, options.verbose)

      common.note('Getting results from %s' % options.swarming)
      common.run(
          [
            'swarming.py',
            'collect',
            '--swarming', options.swarming,
            '--json', json_file,
          ], options.verbose)
      return 0
    finally:
      shutil.rmtree(tempdir)
  except subprocess.CalledProcessError as e:
    # BUG FIX: was the Python 2 statement `print e.returncode or 1`, which
    # is a SyntaxError on Python 3 and made main() return None on failure.
    # Propagate the code like the sibling examples so callers can
    # sys.exit(main()).
    return e.returncode
Пример #17
0
def main():
    """Check the isolate mapping locally, then let `swarming.py run` do the
    archive/run/collect round-trip in one command.

    Returns 0 on success, otherwise the failing subprocess's exit code.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    tempdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
        # Keep the .isolated/.isolated.state artifacts out of the current
        # directory by working inside a temporary one.
        isolated = os.path.join(tempdir, 'hello_world.isolated')

        common.note(
            'Creating hello_world.isolated. Note that this doesn\'t archives '
            'anything.')
        check_cmd = [
            'isolate.py', 'check',
            '--isolate', os.path.join('payload', 'hello_world.isolate'),
            '--isolated', isolated,
            '--config-variable', 'OS', options.swarming_os,
        ]
        common.run(check_cmd, options.verbose)

        common.note('Running the job remotely. This:\n'
                    ' - archives to %s\n'
                    ' - runs and collect results via %s' %
                    (options.isolate_server, options.swarming))
        run_cmd = [
            'swarming.py', 'run',
            '--swarming', options.swarming,
            '--isolate-server', options.isolate_server,
            '--dimension', 'os', options.swarming_os,
            '--task-name', options.task_name,
            isolated,
        ]
        if options.idempotent:
            run_cmd.append('--idempotent')
        if options.priority is not None:
            run_cmd.extend(('--priority', str(options.priority)))
        common.run(run_cmd, options.verbose)
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(tempdir)
Пример #18
0
def main():
  """Archive hello_world, trigger it on Swarming by hash, then collect.

  Returns 0 on success, otherwise the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    tempdir = tempfile.mkdtemp(prefix='hello_world')
    try:
      # All the files are put in a temporary directory. This is optional and
      # simply done so the current directory doesn't have the following files
      # created:
      # - hello_world.isolated
      # - hello_world.isolated.state
      isolated = os.path.join(tempdir, 'hello_world.isolated')
      common.note('Archiving to %s' % options.isolate_server)
      # NOTE(review): this uses options.isolate_os while the trigger below
      # uses options.swarming_os — confirm the two options are meant to
      # differ here.
      common.run(
          [
            'isolate.py',
            'archive',
            '--isolate', os.path.join('payload', 'hello_world.isolate'),
            '--isolated', isolated,
            '--isolate-server', options.isolate_server,
            '--config-variable', 'OS', options.isolate_os,
          ], options.verbose)
      with open(isolated, 'rb') as f:
        hashval = hashlib.sha1(f.read()).hexdigest()
    finally:
      shutil.rmtree(tempdir)

    # At this point, the temporary directory is not needed anymore.
    tempdir = None

    task_name = common.unique_task_name()
    common.note('Running on %s' % options.swarming)
    common.run(
        [
          'swarming.py',
          'trigger',
          '--swarming', options.swarming,
          '--isolate-server', options.isolate_server,
          '--dimension', 'os', options.swarming_os,
          '--task-name', task_name,
          hashval,
        ], options.verbose)

    common.note('Getting results from %s' % options.swarming)
    common.run(
        [
          'swarming.py',
          'collect',
          '--swarming', options.swarming,
          task_name,
        ], options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    # BUG FIX: was the Python 2 statement `print e.returncode or 1`, which
    # is a SyntaxError on Python 3 and made main() return None on failure.
    # Propagate the code like the sibling examples so callers can
    # sys.exit(main()).
    return e.returncode
Пример #19
0
def main():
    """Entry point: run whichever pipeline stages the CLI flags request.

    The stages (format, train, plot, output) are independent; any subset
    may be selected on one invocation.
    """
    configure_tensorflow()
    args = parse_args()

    if args.format_data:
        format_data()
    if args.train:
        train(args.batch_size, args.epochs, args.verbose,
              args.data_percentage, args.patience)
    if args.plot:
        plot()
    if args.out:
        output(args.file_count)
Пример #20
0
def main(stack):
    """Parse the compiler command line and invoke the Skip compiler.

    Combines the shared argument definitions with this tool's own, then
    hands the parsed arguments (plus the resource stack) to compile().
    """
    shared_args = common.commonArguments(needsBackend=False,
                                         backend='native')
    parser = argparse.ArgumentParser(
        description='Run the Skip compiler',
        parents=[shared_args, arguments()])
    parser.add_argument('srcs', metavar='SOURCE', nargs='+')

    args = common.parse_args(parser)
    compile(stack, args)
Пример #21
0
def main():
    """Archive to the isolate server, then download and run locally via
    run_isolated.py.

    Returns 0 on success, otherwise the failing subprocess's exit code.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=False)
    tempdir = tempfile.mkdtemp(prefix='hello_world')
    try:
        # Work in a temporary directory to keep the current one free of:
        # - hello_world.isolated
        # - hello_world.isolated.state
        # - cache/
        cachedir = os.path.join(tempdir, 'cache')
        isolateddir = os.path.join(tempdir, 'isolated')
        isolated = os.path.join(isolateddir, 'hello_world.isolated')
        os.mkdir(isolateddir)

        common.note('Archiving to %s' % options.isolate_server)
        # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
        # Note that --config-variable OS is not specified and nobody cares.
        archive_cmd = [
            'isolate.py', 'archive',
            '--isolate', os.path.join('payload', 'hello_world.isolate'),
            '--isolated', isolated,
            '--isolate-server', options.isolate_server,
        ]
        common.run(archive_cmd, options.verbose)

        common.note(
            'Downloading from %s and running in a temporary directory' %
            options.isolate_server)
        with open(isolated, 'rb') as f:
            isolated_sha1 = hashlib.sha1(f.read()).hexdigest()
        run_cmd = [
            'run_isolated.py',
            '--cache', cachedir,
            '--isolate-server', options.isolate_server,
            '--hash', isolated_sha1,
            '--no-log',
        ]
        common.run(run_cmd, options.verbose)
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(tempdir)
Пример #22
0
def main(stack):
    """Build and execute the unit-test binary for a Skip program.

    Compiles the given program (project:unit) with the selected backend,
    then runs the produced binary under a CPU-time ulimit, forwarding any
    extra command-line arguments. Exits non-zero on failure.
    """
    remainder = common.splitRemainder()
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=description.format(
            annotation=default_annotation,
            delegate_function=default_delegate_function),
        parents=[
            common.commonArguments(needsBackend=False),
            skip_native_compile.arguments(),
        ],
    )
    parser.add_argument(
        "program",
        type=str,
        help="The program for which tests should be run (project:unit)",
    )
    parser.add_argument("--backend",
                        default=os.environ.get("BACKEND", "native"))
    parser.add_argument("--timeout",
                        type=int,
                        default=os.environ.get("SKIP_TEST_TIMEOUT", "300"))
    parser.add_argument("--watch", default=False, action="store_true")

    args = common.parse_args(parser)
    mainFilePath = build_main_file(stack, args.program, args.backend)

    args.srcs = [args.program, mainFilePath]

    if args.backend == "native":
        binFile = skip_native_compile.compile(stack, args)
        cmd = (binFile.name, )
        if args.watch:
            cmd += ("--watch", )
    else:
        # BUG FIX: typo in the user-facing message ("Uknown"); also use
        # sys.exit, which doesn't rely on the site-provided exit() builtin.
        print("Unknown backend %s" % (args.backend))
        sys.exit(2)

    cmd += tuple(remainder)
    logger.debug("Running: " + ' '.join(map(pipes.quote, cmd)))
    # shell=True is needed for the ulimit prefix; every argument is
    # shell-quoted with pipes.quote before being joined.
    with common.PerfTimer("skip_native_exec.test_runtime"):
        res = subprocess.call(
            ("ulimit -t %d ; " % (args.timeout, )) +
            ' '.join(map(pipes.quote, cmd)),
            shell=True,
            env=os.environ,
        )
    if res != 0:
        sys.exit(res)
def main():
  """Local isolate demo: archive into a hashtable directory, then run the
  executable from it in a temporary directory.

  Returns 0 on success, otherwise the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=False, use_swarming=False)
  tempdir = tempfile.mkdtemp(prefix='hello_world')
  try:
    # Use a throwaway directory so the current one is not polluted with:
    # - hello_world.isolated
    # - hello_world.isolated.state
    # - cache/
    # - hashtable/
    cachedir = os.path.join(tempdir, 'cache')
    hashtabledir = os.path.join(tempdir, 'hashtable')
    isolateddir = os.path.join(tempdir, 'isolated')
    isolated = os.path.join(isolateddir, 'hello_world.isolated')
    os.mkdir(isolateddir)

    common.note('Archiving to %s' % hashtabledir)
    # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
    common.run([
        'isolate.py',
        'hashtable',
        '--isolate',
        os.path.join('payload', 'hello_world.isolate'),
        '--isolated',
        isolated,
        '--outdir',
        hashtabledir,
        '--config-variable',
        'OS',
        'Yours',
    ], options.verbose)

    common.note(
        'Running the executable in a temporary directory from the hash table')
    with open(isolated, 'rb') as f:
      digest = hashlib.sha1(f.read()).hexdigest()
    common.run([
        'run_isolated.py',
        '--cache',
        cachedir,
        '--indir',
        hashtabledir,
        '--hash',
        digest,
        # TODO(maruel): Should not require this.
        '--namespace',
        'default',
        '--no-log',
    ], options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
    """Trigger hello_world on Swarming across two shards, then collect results.

    Returns:
        0 on success, or the exit code of the first failing subprocess.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    try:
        scratch = tempfile.mkdtemp(prefix=u'hello_world')
        try:
            _, hashval = common.isolate(
                scratch, options.isolate_server, options.swarming_os,
                options.verbose)

            task_json = os.path.join(scratch, 'task.json')
            common.note('Running on %s' % options.swarming)
            trigger = [
                'swarming.py', 'trigger',
                '--swarming', options.swarming,
                '--isolate-server', options.isolate_server,
                '--dimension', 'os', options.swarming_os,
                '--dimension', 'pool', 'default',
                '--task-name', options.task_name,
                '--dump-json', task_json,
                '--isolated', hashval,
                '--shards', '2',
            ]
            if options.idempotent:
                trigger.append('--idempotent')
            if options.priority is not None:
                trigger += ['--priority', str(options.priority)]
            trigger += ['--', '${ISOLATED_OUTDIR}']
            common.run(trigger, options.verbose)

            common.note('Getting results from %s' % options.swarming)
            collect = [
                'swarming.py', 'collect',
                '--swarming', options.swarming,
                '--json', task_json,
                '--task-output-dir', 'example_result',
            ]
            common.run(collect, options.verbose)
            # Dump every collected output file so the user can inspect the
            # shard results directly.
            for root, _, names in os.walk('example_result'):
                for name in names:
                    path = os.path.join(root, name)
                    with open(path, 'rb') as f:
                        print('%s content:' % path)
                        print(f.read())
            return 0
        finally:
            shutil.rmtree(scratch)
    except subprocess.CalledProcessError as e:
        return e.returncode
Пример #25
0
def main():
    """Check the hello_world isolate locally, then run it via `swarming.py run`.

    Returns:
        0 on success, or the exit code of the first failing subprocess.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    workdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
        # Generated .isolated/.isolated.state files live in the scratch
        # directory so the current directory is left untouched.
        isolated_path = os.path.join(workdir, 'hello_world.isolated')

        common.note(
            "Creating hello_world.isolated. Note that this doesn't archives "
            'anything.')
        check_cmd = ['isolate.py', 'check']
        check_cmd += ['--isolate', os.path.join('payload', 'hello_world.isolate')]
        check_cmd += ['--isolated', isolated_path]
        check_cmd += ['--config-variable', 'OS', options.swarming_os]
        common.run(check_cmd, options.verbose)

        common.note(
            'Running the job remotely. This:\n'
            ' - archives to %s\n'
            ' - runs and collect results via %s' %
            (options.isolate_server, options.swarming))
        run_cmd = [
            'swarming.py', 'run',
            '--swarming', options.swarming,
            '--isolate-server', options.isolate_server,
            '--dimension', 'os', options.swarming_os,
            '--task-name', options.task_name,
            isolated_path,
        ]
        if options.idempotent:
            run_cmd.append('--idempotent')
        if options.priority is not None:
            run_cmd += ['--priority', str(options.priority)]
        common.run(run_cmd, options.verbose)
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(workdir)
Пример #26
0
def main():
    """Archive hello_world to an isolate server, then fetch and run it locally.

    Returns:
        0 on success, or the exit code of the first failing subprocess.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=False)
    workdir = tempfile.mkdtemp(prefix='hello_world')
    try:
        # Keep .isolated files and the download cache inside the scratch
        # directory so the current directory stays clean.
        cache_path = os.path.join(workdir, 'cache')
        isolated_dir = os.path.join(workdir, 'isolated')
        isolated_path = os.path.join(isolated_dir, 'hello_world.isolated')
        os.mkdir(isolated_dir)

        common.note('Archiving to %s' % options.isolate_server)
        # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
        # Note that --config-variable OS is not specified and nobody cares.
        archive_cmd = ['isolate.py', 'archive']
        archive_cmd += ['--isolate', os.path.join('payload', 'hello_world.isolate')]
        archive_cmd += ['--isolated', isolated_path]
        archive_cmd += ['--isolate-server', options.isolate_server]
        common.run(archive_cmd, options.verbose)

        common.note(
            'Downloading from %s and running in a temporary directory' %
            options.isolate_server)
        with open(isolated_path, 'rb') as f:
            digest = hashlib.sha1(f.read()).hexdigest()
        run_cmd = ['run_isolated.py']
        run_cmd += ['--cache', cache_path]
        run_cmd += ['--isolate-server', options.isolate_server]
        run_cmd += ['--hash', digest]
        run_cmd.append('--no-log')
        common.run(run_cmd, options.verbose)
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(workdir)
Пример #27
0
def main():
    """Run hello_world remotely via `swarming.py run` and print the summary JSON.

    Returns:
        0 on success, or the exit code of the first failing subprocess.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    workdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
        digest = common.isolate(workdir, options.isolate_server,
                                options.swarming_os, options.verbose)
        common.note('Running the job remotely. This:\n'
                    ' - archives to %s\n'
                    ' - runs and collect results via %s' %
                    (options.isolate_server, options.swarming))
        run_cmd = ['swarming.py', 'run']
        run_cmd += ['--swarming', options.swarming]
        run_cmd += ['--isolate-server', options.isolate_server]
        run_cmd += ['--dimension', 'os', options.swarming_os]
        run_cmd += ['--dimension', 'pool', 'default']
        run_cmd += ['--task-name', options.task_name]
        run_cmd += ['--task-summary-json', 'example_result.json']
        run_cmd.append('--decorate')
        run_cmd += ['--isolated', digest]
        if options.idempotent:
            run_cmd.append('--idempotent')
        if options.priority is not None:
            run_cmd += ['--priority', str(options.priority)]
        if options.service_account:
            run_cmd += ['--service-account', options.service_account]
        common.run(run_cmd, options.verbose)
        # The summary JSON is written to the current directory by swarming.py.
        with open('example_result.json', 'rb') as f:
            print('example_result.json content:')
            print(f.read())
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(workdir)
def main():
    """Process metrics files in parallel and write the merged rows to CSV."""
    options = parse_args(logger)
    os.chdir(options.directory)
    logger.addHandler(log_to_file('metrics.log'))

    workdir = os.getcwd()
    targets = get_files(options.starts_with)
    rows = []

    # Fan the per-file work out across a process pool, then flatten every
    # worker's row list into a single list.
    with Pool(options.cores) as pool:
        pending = [pool.apply_async(metrics_file_process, (workdir, target))
                   for target in targets]
        for item in pending:
            rows.extend(item.get())

    sort_list_of_dict(rows)
    write_csv_list_of_dict('metrics_data.csv', rows, logger)
Пример #29
0
def main():
    """Aggregate ecFlow metrics and publish each datum to the given namespace.

    Reads CLI arguments, aggregates counts, meters, aborted tasks and running
    threads from the ecFlow metrics source, and publishes every resulting
    datum via put_metric_data.
    """
    args = parse_args()
    namespace = args.namespace
    # FIX: the original also read args.ecflow_host / args.ecflow_port into
    # locals that were never used; those dead reads are removed.
    fetch_new = bool(strtobool(args.fetch_new))

    # All published data points are tagged with the environment dimension.
    dimensions = dict(Env=args.environment)

    metrics = get_ecflow_metrics(fetch_new)
    aggregator = MetricAgregator(metrics, dimensions)
    datapoints = (
        aggregator.get_metrics_counts()
        + aggregator.get_metrics_meters()
        + aggregator.get_aborted_task_list()
        + aggregator.get_running_threads()
    )

    # Publish one datum per call, preserving the original call pattern.
    for datum in datapoints:
        put_metric_data([datum], namespace)
Пример #30
0
def main():
    """Transform data files in parallel and write the collected records to CSV."""
    options = parse_args(logger)
    os.chdir(options.directory)
    logger.addHandler(log_to_file('transform.log'))

    workdir = os.getcwd()
    targets = get_files(options.starts_with)
    rows = []

    # TODO mem profiling not working with mp
    with Pool(options.cores) as pool:
        pending = [pool.apply_async(data_file_process, (workdir, target))
                   for target in targets]
        for item in pending:
            # Each worker result is collected as a single record (append,
            # not extend), in submission order.
            rows.append(item.get())

    sort_list_of_dict(rows)
    write_csv_list_of_dict('processed_data.csv', rows, logger)
def main():
    """Run hello_world through `swarming.py run` and show the task summary JSON.

    Returns:
        0 on success, or the exit code of the first failing subprocess.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    workdir = tempfile.mkdtemp(prefix=u"hello_world")
    try:
        digest, _ = common.isolate(
            workdir, options.isolate_server, options.swarming_os,
            options.verbose)
        common.note(
            "Running the job remotely. This:\n"
            " - archives to %s\n"
            " - runs and collect results via %s"
            % (options.isolate_server, options.swarming))
        run_cmd = ["swarming.py", "run"]
        run_cmd += ["--swarming", options.swarming]
        run_cmd += ["--isolate-server", options.isolate_server]
        run_cmd += ["--dimension", "os", options.swarming_os]
        run_cmd += ["--task-name", options.task_name]
        run_cmd += ["--task-summary-json", "example_result.json"]
        run_cmd += ["--decorate", digest]
        if options.idempotent:
            run_cmd.append("--idempotent")
        if options.priority is not None:
            run_cmd += ["--priority", str(options.priority)]
        common.run(run_cmd, options.verbose)
        # The summary JSON is written to the current directory by swarming.py.
        with open("example_result.json", "rb") as f:
            print("example_result.json content:")
            print(f.read())
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(workdir)
Пример #32
0
def main():
    """Launch an over-sized Mesos task asynchronously and wait for it to end.

    Returns:
        0 always; the wait loop gives up after ~100 seconds.
    """
    counter = Counter()
    args = parse_args()
    processor = TaskProcessor()
    processor.load_plugin(provider_module='task_processing.plugins.mesos')
    mesos_executor = processor.executor_from_config(
        provider='mesos_task',
        provider_config={
            'secret': args.secret,
            'mesos_address': args.master,
            'pool': args.pool,
            'role': args.role,
        },
    )

    TaskConfig = mesos_executor.TASK_CONFIG_INTERFACE
    # Route only terminal events into the counter.
    runner = Async(
        mesos_executor,
        [EventHandler(
            predicate=lambda event: event.terminal,
            cb=counter.process_event,
        )])
    # The resource ask (20 CPUs / 2 GB / 2 GB disk) is deliberately large so
    # the task can hit its 5-second offer_timeout in a small cluster.
    timeout_task_config = TaskConfig(
        image='busybox',
        cmd='exec /bin/sleep 100',
        offer_timeout=5.0,
        cpus=20,
        mem=2048,
        disk=2000,
    )
    runner.run(timeout_task_config)

    # Poll for up to 50 * 2s = 100s for the terminal event to arrive.
    for _ in range(50):
        if counter.terminated >= 1:
            break
        print("waiting for task %s to finish" % (timeout_task_config.task_id))
        time.sleep(2)

    runner.stop()
    return 0
Пример #33
0
def main():
    """Dispatch the pipeline stages selected by the parsed CLI flags."""
    args = parse_args()

    # Data formatting bypasses TensorFlow configuration entirely.
    if args.format_data:
        format_data(args.file_count)
    else:
        configure_tensorflow()

    # Each flag independently enables a stage; several may run in sequence,
    # in the fixed order below.
    stages = (
        (args.train, lambda: train(args.batch_size, args.epochs, args.verbose,
                                   args.data_percentage, args.patience)),
        (args.encode, lambda: encode(args.batch_size)),
        (args.generate, lambda: generate(args.file_count, args.batch_size)),
        (args.cross, lambda: cross(args.file_count, args.batch_size)),
        (args.plot, lambda: plot(args.batch_size)),
        (args.gui, gui),
        (args.out, lambda: output(args.file_count, args.batch_size)),
    )
    for enabled, run_stage in stages:
        if enabled:
            run_stage()
Пример #34
0
def main():
    """Run hello_world on Swarming via `swarming.py run`, printing the summary.

    Returns:
        0 on success, or the exit code of the first failing subprocess.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    workdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
        digest, _ = common.isolate(
            workdir, options.isolate_server, options.swarming_os,
            options.verbose)
        common.note(
            'Running the job remotely. This:\n'
            ' - archives to %s\n'
            ' - runs and collect results via %s' %
            (options.isolate_server, options.swarming))
        run_cmd = ['swarming.py', 'run']
        run_cmd += ['--swarming', options.swarming]
        run_cmd += ['--isolate-server', options.isolate_server]
        run_cmd += ['--dimension', 'os', options.swarming_os]
        run_cmd += ['--dimension', 'pool', 'default']
        run_cmd += ['--task-name', options.task_name]
        run_cmd += ['--task-summary-json', 'example_result.json']
        run_cmd.append('--decorate')
        run_cmd.append(digest)
        if options.idempotent:
            run_cmd.append('--idempotent')
        if options.priority is not None:
            run_cmd += ['--priority', str(options.priority)]
        common.run(run_cmd, options.verbose)
        # The summary JSON is written to the current directory by swarming.py.
        with open('example_result.json', 'rb') as f:
            print('example_result.json content:')
            print(f.read())
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(workdir)
Пример #35
0
def main():
    """Convert each input alignment file to the requested output format.

    For every path returned by common.parse_args("alignment"): detect (or take
    the explicit) input format, parse the alignments with Biopython's AlignIO,
    optionally override the sequence alphabet, and write them out in the
    target format next to the input file.

    Raises:
        AssertionError: if no input format is known or no alignments parse.
    """
    out_fmt, infiles, opts = common.parse_args("alignment")

    for in_path in infiles:
        # `in_dir` (not `dir`) avoids shadowing the builtin.
        in_dir, base, ext = common.dir_base_ext(in_path)
        # Explicit --input-format wins; otherwise infer from the extension.
        in_fmt = (opts.input_format or EXT_TO_FORMAT.get(ext, "")).lower()
        assert in_fmt, "no known input format specified"

        # calculate output format and name
        out_path = common.make_out_path(
            in_dir, base, opts.output_extension or FORMAT_TO_EXT[out_fmt])

        # open & read infile; the context manager closes it even on error
        with open(in_path, "rb") as in_hndl:
            in_alns = [x for x in AlignIO.parse(in_hndl, in_fmt)]
        # BUG FIX: the original message referenced an undefined name
        # `file_name`, so a failed parse raised NameError instead of the
        # intended AssertionError.
        assert in_alns, """No alignments read from %s. Perhaps the file is not in %s format.""" % (in_path, in_fmt)

        # write out
        with open(out_path, "wb") as out_hndl:
            if opts.seqtype:
                for s in in_alns:
                    s._alphabet = opts.seqtype
            AlignIO.write(in_alns, out_hndl, out_fmt)
Пример #36
0
def delete_list_item_action():
    """CLI action: delete a todo-list item and pretty-print the API response.

    Prints a usage hint when either argument parsing or the delete call
    raises ValidationError.
    """
    try:
        pprint.pprint(delete_list_item(*parse_args(2, 1)))
    except ValidationError:
        print("The list id and item id are required for todo item delete")
Пример #37
0
def add_list_item_action():
    """CLI action: add an item to a todo list and pretty-print the response.

    Prints a usage hint when either argument parsing or the add call raises
    ValidationError.
    """
    try:
        pprint.pprint(add_list_item(*parse_args(2, 1)))
    except ValidationError:
        print("The list id is required")
Пример #38
0
def delete_list_action():
    """CLI action: delete a whole todo list and pretty-print the response.

    Prints a usage hint when either argument parsing or the delete call
    raises ValidationError.
    """
    try:
        pprint.pprint(delete_todo_list(*parse_args(1, 1)))
    except ValidationError:
        print("The id is required for todo list delete")
Пример #39
0
def view_list_action():
    """CLI action: display a todo list and pretty-print the response.

    Prints a usage hint when either argument parsing or the view call raises
    ValidationError.
    """
    try:
        pprint.pprint(view_todo_list(*parse_args(1, 1)))
    except ValidationError:
        print("The id is required for todo list view")
Пример #40
0
import unittest

import common, split_tests

def suite():
    """Build the top-level test suite from the split_tests module's suite."""
    return unittest.TestSuite([split_tests.suite()])

if __name__ == '__main__':
    # Map the shared --verbose flag onto unittest's verbosity levels
    # (2 = chatty, 1 = default).
    (options, args), parser = common.parse_args()
    verbosity = 2 if options.verbose else 1
    unittest.TextTestRunner(verbosity=verbosity).run(suite())
Пример #41
0
#!/usr/bin/env python

import common
import sys, os, shutil
import magic

# Parse CLI arguments into common's module-level state; `common.file_path`
# is read below, so parse_args presumably sets it — confirm in common.py.
common.parse_args(sys.argv)

# Detect the target's MIME type; the second (True) argument asks python-magic
# for the MIME string instead of a human-readable description.
mime_type = magic.from_file(common.file_path, True)
# Only regular files are considered: delete anything libmagic identifies as
# PHP source. NOTE(review): looks like a scrub for uploaded PHP — confirm.
if not os.path.isdir(common.file_path):
    if mime_type == 'text/x-php':
        os.remove(common.file_path)
Пример #42
0
#!/usr/bin/env python3

from os import chdir
from time import sleep

# Where the magic happens
import common

if __name__ == '__main__':
    # Grab the command line args and calculated verbosity
    (args, verbosity) = common.parse_args()
    # Set up the fancy logger, with optional file logging
    common.configure_logger(verbosity, args.logfile)
    # We're calling this a lot, so let's make it shorter
    logger = common.logger
    # And let's load the config file into common-space
    common.load_configuration(args.configfile)

    # For simplicity, shift to the repo root
    path = common.get_path()
    chdir(path)

    # Load checks and actions into the common-space
    # Sources are loaded by the checks themselves
    # Likewise, filters are loaded by actions
    common.get_thing('checks', common.checks)
    common.get_thing('actions', common.actions)

    # Main polling loop. NOTE(review): the visible body only resets the
    # message list and refreshes the timestamp — the code that actually runs
    # checks/actions appears to be missing from this excerpt; confirm against
    # the full source before relying on this loop.
    while True:
        messages = []
        now = common.now(update=True)
Пример #43
0
import requests, argparse
import common

# Positional CLI arguments specific to this tool; common.parse_args is
# expected to attach further shared options to the same parser.
parser = argparse.ArgumentParser(description='Ping tool')
parser.add_argument("estimate", type=float)
parser.add_argument("test_case_id", type=str)

if __name__ == '__main__':
    args = common.parse_args(parser)

    # NOTE(review): candidate_id and endpoint are not declared above, so they
    # are presumably added by common.parse_args — confirm.
    headers = {'candidate-id': args.candidate_id}
    data = {'estimate': args.estimate, 'test_case_id': args.test_case_id}
    req = requests.post(url=args.endpoint + "/test_case", data=data, headers=headers)
    # BUG FIX: `print req.text` is Python 2 statement syntax and a
    # SyntaxError under Python 3; use the print() function (valid in both).
    print(req.text)
Пример #44
0
getImageFileIndex['map'] = """
function(doc){
    if(doc.type==="file"){
        const att=doc._attachments;
        const contentType=att[Object.keys(att)[0]].content_type;
        if(contentType.substring(0,6)==="image/"){
            emit(doc.name,doc);
        }
    }
}
"""

getImageFileIndex['reduce'] = "_count"

args = common.parse_args()
conn = common.get_connection(args.use_ssl, args.couch_server, args.couch_port)

credentials = common.get_credentials(args.adminuser, args.adminpass)
get_headers = common.get_headers(credentials)
put_headers = common.put_headers(credentials)

# Update all the wiki design docs
conn.request("GET", '/_all_dbs', headers=get_headers)
db_list = common.decode_response(conn.getresponse())
wiki_list = [db for db in db_list if db[0:5] == "wiki_"]

# Update the wiki dbs
for wiki in wiki_list:
    print("Examining " + wiki)
    # Fetch design doc