Code example #1
File: test_cfnas.py Project: pior/awstools
    def test_command_metrics(self, m_format_as, m_find_one_r, m_find_one_s):
        from awstools.commands import cfnautoscale
        from awstools.utils.cloudformation import RES_TYPE_ASG

        stack = mock.Mock(stack_name='test_stack_name_1',
                          creation_time=datetime.datetime.utcnow())

        resource = mock.Mock(enabled_metrics=['metric1', 'metric2'])

        m_find_one_s.return_value = stack
        m_find_one_r.return_value = resource

        argh.dispatch_command(cfnautoscale.metrics,
                              argv=['testpattern'],
                              output_file=self.stdout,
                              errors_file=self.stderr,
                              completion=False)

        m_find_one_s.assert_called_with('testpattern', summary=False)

        self.assertIn(stack.stack_name, self.stdout.getvalue())

        m_find_one_r.assert_called_with(stack, RES_TYPE_ASG)

        self.assertIn('metric1', self.stdout.getvalue())
        self.assertIn('metric2', self.stdout.getvalue())
Code example #2
File: test_cfnas.py Project: pior/awstools
    def test_command_status(self, m_format, m_find_stacks, m_c_cfn):
        from awstools.commands import cfnautoscale

        m_format.side_effect = lambda x: "format: %s" % x

        stacks = [
            mock.Mock(stack_name='test_stack_name_1'),
            mock.Mock(stack_name='test_stack_name_2'),
            mock.Mock(stack_name='test_stack_name_3')
        ]

        m_find_stacks.return_value = stacks
        m_c_cfn.return_value.describe_stacks.side_effect = zip(stacks)

        argh.dispatch_command(
            cfnautoscale.status,
            argv=['testpattern'],
            output_file=self.stdout,
            errors_file=self.stderr,
            completion=False,
        )

        m_format.assert_has_calls([mock.call(s) for s in stacks])

        for stack in stacks:
            self.assertIn(str(stack), self.stdout.getvalue())
Code example #3
File: test_cfnas.py Project: pior/awstools
    def test_command_show_cfg(self, m_find_one_r, m_find_stacks, m_c_cfn):
        from awstools.commands import cfnautoscale

        stacks = [
            mock.Mock(stack_name='test_stack_name_1'),
            mock.Mock(stack_name='test_stack_name_2'),
            mock.Mock(stack_name='test_stack_name_3')
        ]

        resources = [
            mock.Mock(instances=[mock.Mock(launch_config_name='lc1')]),
            mock.Mock(instances=[mock.Mock(launch_config_name='lc2')]),
            mock.Mock(instances=[mock.Mock(launch_config_name='lc3')])
        ]

        m_find_stacks.return_value = stacks
        m_c_cfn.return_value.describe_stacks.side_effect = zip(stacks)

        m_find_one_r.side_effect = resources

        argh.dispatch_command(cfnautoscale.show_cfg,
                              argv=['testpattern'],
                              output_file=self.stdout,
                              errors_file=self.stderr,
                              completion=False)

        for stack in stacks:
            self.assertIn(stack.stack_name, self.stdout.getvalue())

        for name in ['lc1', 'lc2', 'lc3']:
            self.assertIn(name, self.stdout.getvalue())
Code example #4
File: commands.py Project: carriercomm/claw
def script(configuration, script_path, script_args):
    """Run a script managed by claw with the provided configuration
       as context."""
    conf = Configuration(configuration)
    if not conf.exists():
        raise NO_INIT
    if not os.path.isfile(script_path):
        for scripts_dir in settings.scripts:
            possible_script_path = scripts_dir / script_path
            possible_script_path2 = scripts_dir / '{0}.py'.format(script_path)
            if possible_script_path.isfile():
                script_path = possible_script_path
                break
            if possible_script_path2.isfile():
                script_path = possible_script_path2
                break
        else:
            raise argh.CommandError('Could not locate {0}'.format(script_path))
    exec_globs = exec_env.exec_globals(script_path)
    execfile(script_path, exec_globs)
    if script_args and script_args[0] in exec_globs:
        func = script_args[0]
        script_args = script_args[1:]
    else:
        func = 'script'
    script_func = exec_globs.get(func)
    if not script_func:
        raise argh.CommandError('Cannot find a function to execute. Did you '
                                'add a default "script" function?')
    try:
        current_configuration.set(conf)
        argh.dispatch_command(script_func, argv=script_args)
    finally:
        current_configuration.clear()
Code example #6
    def test_command_multi(self, mock_call, mock_confirm, mock_ec2):
        from awstools.commands import ec2ssh

        command = ['remote', 'command']
        identifiers = ','.join([self.instances[0].id,
                                self.instances[1].id])

        mock_ec2.get_instances.return_value = self.instances
        mock_ec2.get_name = lambda x: x.id
        mock_ec2.filter_instances = lambda x, y: [y[0], y[1]]

        argv = [identifiers] + command
        argh.dispatch_command(ec2ssh.connect,
                              argv=argv,
                              output_file=self.stdout,
                              errors_file=self.stderr,
                              completion=False,
                              )

        mock_call.assert_any_call(
            ['ssh', self.instances[0].public_dns_name] + command,
        )

        mock_call.assert_any_call(
            ['ssh', self.instances[1].public_dns_name] + command,
        )

        self.assertTrue(mock_confirm.called)
Code example #7
    def test_command_info(self):
        from awstools.commands import cloudformation

        argh.dispatch_command(cloudformation.info,
                              argv=[self.stacks[0].stack_name],
                              output_file=self.stdout,
                              errors_file=self.stderr,
                              completion=False,
                              )
Code example #8
File: cli.py Project: youngrok/july3
def main(rules_dict=None):
    if not rules_dict:
        caller_frame = inspect.stack()[1]
        rules_dict = dict(inspect.getmembers(inspect.getmodule(caller_frame[0])))

    for name, rule in rules_dict.items():
        if isinstance(rule, Rule):
            rules.register(name, rule)

    argh.dispatch_command(make)
Code example #9
def main():
    @arg('filetypes', metavar='FILETYPE', nargs='+',  help='Included file types for generating tags')
    @arg('--rebuild', default=False, help='Force rebuild all tags at start')
    @arg('--ctags', default='ctags', help='Path to ctags program.(default: ctags)')
    def _watcher(args):
        from watchdog.observers import Observer 
        handler = CtagsTrick(filetypes=args.filetypes, ctags=args.ctags, rebuild=args.rebuild)
        observer = Observer(timeout=1.0)
        observe_with(observer, handler, ['.'], True) 
    dispatch_command(_watcher)
Code example #11
def gin_wrap(fnc):
    def main(save_path, config, bindings=""):
        # You can pass many configs (think of them as mixins), and many bindings. Both "#" separated.
        gin.parse_config_files_and_bindings(config.split("#"), bindings.replace("#", "\n"))
        if not os.path.exists(save_path):
            logger.info("Creating folder " + save_path)
            os.system("mkdir -p " + save_path)
        run_with_redirection(os.path.join(save_path, "stdout.txt"),
                             os.path.join(save_path, "stderr.txt"),
                             fnc)(save_path)
    argh.dispatch_command(main)
Code example #12
def main():
    @arg('LESS_DIR', help='Directory of less source files')
    @arg('CSS_DIR', help='Directory of generated css files')
    @arg('--lessc-path', default='lessc', help='Path to less compiler.(default: lessc)')
    def _watcher(args):
        "Watch changes in less directory and auto-compile modified file to css"
        from watchdog.observers import Observer 
        handler = LessTrick(src_dir=args.LESS_DIR, dest_dir=args.CSS_DIR, compiler=args.lessc_path)
        observer = Observer(timeout=1.0)
        observe_with(observer, handler, [args.LESS_DIR], True) 
    dispatch_command(_watcher)
Code example #13
    def test_command_empty(self):
        from awstools.commands import ec2ssh

        argv = []
        argh.dispatch_command(ec2ssh.connect,
                              argv=argv,
                              output_file=self.stdout,
                              errors_file=self.stderr,
                              completion=False,
                              )

        self.assertIn('CommandError', self.stderr.getvalue())
Code example #14
    def test_command_ls(self):
        from awstools.commands import cloudformation

        argh.dispatch_command(cloudformation.ls,
                              argv=[],
                              output_file=self.stdout,
                              errors_file=self.stderr,
                              completion=False,
                              )

        for stack in self.stacks:
            self.assertIn(stack.stack_name, self.stdout.getvalue())
Code example #15
def main():
    """
    Dispatch 'run' command and break script with return code 1 and proper
    message in case of any exception.
    """
    try:
        dispatch_command(run)
    except Exception as e:
        print('WARNING - unhandled Exception: %s' % str(e))
        if os.getenv('CHECK_QUEUES_DEBUG'):
            import traceback
            traceback.print_exc()
        sys.exit(1)
Code example #16
    def test_option_completion_script(self):
        from awstools.commands import ec2ssh

        argv = ['--completion-script']
        argh.dispatch_command(ec2ssh.connect,
                              argv=argv,
                              output_file=self.stdout,
                              errors_file=self.stderr,
                              completion=False,
                              )

        self.assertIn('_ec2ssh()', self.stdout.getvalue())
        self.assertIn('ec2ssh --completion-list', self.stdout.getvalue())
        self.assertIn('complete -F _ec2ssh ec2ssh', self.stdout.getvalue())
Code example #17
File: main.py Project: semond/html2md
def main():
    handler = StderrHandler()
    # handler.formatter = color_formatter
    handler.level = 2
    nullhandler = NullHandler()

    with nullhandler.applicationbound():
        with handler.applicationbound():
            with catch_exceptions(""):
                try:
                    dispatch_command(urltomarkdown)
                except SystemExit as e:
                    # catch_exceptions is a bit too catchy
                    pass
Code example #18
    def test_option_completion_list(self, mock_read, mock_ec2):
        from awstools.commands import ec2ssh

        mock_read.return_value = ['instance_name', 'instance_name2']

        argv = ['--completion-list']
        argh.dispatch_command(ec2ssh.connect,
                              argv=argv,
                              output_file=self.stdout,
                              errors_file=self.stderr,
                              completion=False,
                              )

        self.assertIn(' '.join(mock_read.return_value), self.stdout.getvalue())
Code example #20
File: loader.py Project: dankilman/clash
 def func(args):
     self._set_paths()
     args = vars(args)
     for user_command in macro['commands']:
         user_command_name = user_command['name']
         user_command_args = functions.parse_parameters(
             loader=self,
             parameters=user_command.get('args', []),
             args=args)
         print '==> {0}: {1}'.format(user_command_name,
                                     user_command_args)
         user_command_func = self.user_commands[user_command_name]
         argh.dispatch_command(user_command_func,
                               argv=user_command_args)
Code example #21
def main():
    """
    Dispatch 'run' command and break script with return code 1 and proper
    message in case of any exception.
    """
    try:
        dispatch_command(run)
    except Exception as e:
        print "WARNING - unhandled Exception: %s" % str(e)
        if os.getenv("CHECK_QUEUES_DEBUG"):
            import traceback

            traceback.print_exc()
        sys.exit(1)
Code example #22
    def test_command_single(self, mock_execvp, mock_ec2):
        from awstools.commands import ec2ssh

        command = ['remote', 'command']
        identifiers = self.instances[0].id

        mock_ec2.get_instances.return_value = self.instances
        mock_ec2.get_name = lambda x: x.id
        mock_ec2.filter_instances = lambda x, y: [y[0]]

        argv = [identifiers] + command
        argh.dispatch_command(ec2ssh.connect,
                              argv=argv,
                              output_file=self.stdout,
                              errors_file=self.stderr,
                              completion=False,
                              )

        mock_execvp.assert_called_once_with(
            'ssh',
            ['ec2ssh', self.instances[0].public_dns_name] + command,
        )
Code example #23
def _dispatch_and_capture(func, command_string, **kwargs):
    if hasattr(command_string, 'split'):
        args = command_string.split()
    else:
        args = command_string

    io = make_IO()
    if 'output_file' not in kwargs:
        kwargs['output_file'] = io

    result = argh.dispatch_command(func, args, **kwargs)

    if kwargs.get('output_file') is None:
        return result
    else:
        io.seek(0)
        return io.read()
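For reference, here is a minimal, self-contained sketch of the dispatch-and-capture pattern that the helper above wraps. It assumes a reasonably recent argh release on Python 3; the `greet` command is invented purely for illustration and does not come from any of the projects listed here.

import io

import argh


def greet(name):
    """Toy command used only for this sketch."""
    return 'Hello, {0}!'.format(name)


# dispatch_command() builds a single-command parser around `greet`,
# parses argv, calls the function, and writes its return value to
# output_file instead of sys.stdout.
out = io.StringIO()
argh.dispatch_command(greet, argv=['world'], output_file=out)
assert 'Hello, world!' in out.getvalue()

This is the same mechanism the test examples above rely on when they pass StringIO objects as output_file and errors_file and then assert on their contents.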
Code example #24
    os.rename(downloaded_track, audio_track)


@argh.arg('-o', '--output-dir', help='Output directory')
def main(url, output_dir='.', client_id='8cc1ad31682003359858bcea08162fea'):
    '''Download the given track/playlist'''
    client = SoundCloudClient(client_id)
    print u'Reading URL...'
    response = client.request('resolve', url=url).json()

    if 'tracks' in response: # a playlist
        print u'Playlist has {} tracks'.format(len(response['tracks']))

        title = response['title']
        title = normalize(title)

        output_dir = os.path.join(output_dir, title)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

        for track in response['tracks']:
            download_track(client, track, output_dir=output_dir)

    else:
        download_track(client, response, output_dir=output_dir)


if __name__ == '__main__':
    argh.dispatch_command(main)
Code example #25
def dispatch():
    argh.dispatch_command(main)
Code example #26
File: debug.py Project: lowks/shared
def main():
    argh.dispatch_command(_main)
Code example #27
File: migrate_project.py Project: alphagov/ghtools
def main():
    dispatch_command(migrate_project)
Code example #29
File: timeit.py Project: lowks/crate-devtools
def main():
    argh.dispatch_command(timeit)
Code example #30
			try:
				cube = pydatacube.pcaxis.to_cube(data,
					origin_url=resource['url'])
			except pydatacube.pcaxis.PxSyntaxError, e:
				print >>sys.stderr, "Px parsing failed:", e
				continue
			try:
				pydatacube.sql.SqlDataCube.FromCube(con, id, cube, replace=True)
				con.commit()
			finally:
				con.close()
		except urllib2.HTTPError, e:
			print >>sys.stderr, "Download error", e, resource['package']['ckan_url']
			try:
				from urlparse import urlparse, parse_qs, urlunparse
				alternate = (r for r in resource['package']['resources'] if r['format'] == 'tietokanta').next()
				urlparts = urlparse(alternate['url'])
				url = parse_qs(urlparts.query)['bmark'][0].lower() + ".px"
				url = '/'.join(url.split('/')[1:])
				# WTF Guido, why is the URL API so damn horrible?
				url = urlunparse(list(urlparts[:2]) + [url] + ['']*3)
				print >>sys.stderr, "The right URL is probably %s"%(url,)
			except StopIteration:
				print >>sys.stderr, "'Tietokanta' link missing, can't guess the right url"
			print >>sys.stderr, ""
	

if __name__ == '__main__':
	import argh
	argh.dispatch_command(load_resources)
Code example #31
import argh

def evaluate(save_path, checkpoint_name="weights.ckpt"):
    # Load config
    config = parse_gin_config(os.path.join(save_path, "config.gin"))
    gin.parse_config_files_and_bindings([os.path.join(os.path.join(save_path, "config.gin"))], bindings=[""])

    # Create dynamically dataset generators
    train, valid, test, meta_data = get_dataset(batch_size=config['train.batch_size'], seed=config['train.seed'])

    # Load model (a bit hacky, but necessary because load_from_checkpoint seems to fail)
    ckpt_path = os.path.join(save_path, checkpoint_name)
    ckpt = torch.load(ckpt_path)
    model = models.__dict__[config['train.model']]()
    summary(model)
    pl_module = SupervisedLearning(model, lr=0.0)
    pl_module.load_state_dict(ckpt['state_dict'])

    # NOTE: This fails, probably due to a bug in Pytorch Lightning. The above is manually doing something similar
    # ckpt_path = os.path.join(save_path, checkpoint_name)
    # pl_module = SupervisedLearning.load_from_checkpoint(ckpt_path)

    trainer = pl.Trainer()
    results, = trainer.test(model=pl_module, test_dataloaders=test, ckpt_path=ckpt_path)
    logger.info(results)
    with open(os.path.join(save_path, "eval_results_{}.json".format(checkpoint_name)), "w") as f:
        json.dump(results, f)

if __name__ == "__main__":
    argh.dispatch_command(evaluate)
Code example #32
    # print np.arctan(abs(v1_exp/v2_exp))
    # print np.arctan(abs(c1_exp/c2_exp))
    # print np.angle(v1_exp/v2_exp)
    # print np.angle(c1_exp/c2_exp)

    ax2.plot(delta_tm, np.angle(v1_tm[:, 0]/v1_tm[:, 1]), ls="--", color=colors[0])
    ax2.plot(delta_tm, np.angle(v2_tm[:, 0]/v2_tm[:, 1]), ls="--", color=colors[1])
    ax2.set_xlabel(r"$\delta\cdot W$")

    # ax3.set_title(r"$\lambda$")
    # ax3.plot(c, (ev1_abs * np.exp(1j*ev1_phi)).real, marker="", ls="-", color=colors[0], clip_on=False)
    # ax3.plot(c, (ev1_abs * np.exp(1j*ev1_phi)).imag, marker="", ls="--", color=colors[0], clip_on=False)
    # ax3.plot(c, (ev2_abs * np.exp(1j*ev2_phi)).real, marker="", ls="-", color=colors[1], clip_on=False)
    # ax3.plot(c, (ev2_abs * np.exp(1j*ev2_phi)).imag, marker="", ls="--", color=colors[1], clip_on=False)
    # ax3.set_xlabel(r"$\delta\cdot W$")

    for a in (ax1, ax2): #, ax3):
        a.set_xlim(-3.1, 2.05)

    # plt.subplots_adjust(top=0.5)
    plt.tight_layout()

    if not save:
        plt.show()
    else:
        plt.savefig("coefficients.png")


if __name__ == '__main__':
    argh.dispatch_command(plot_coefficients)
Code example #33
                 color=colors[1], label=r"$|\alpha_-(t)|^2$")
    ax1.semilogy(t, np.abs(cp)**2, ls="--",
                 color=colors[2], label=r"$|c_+(t)|^2$")
    ax1.semilogy(t, np.abs(cm)**2, ls="--",
                 color=colors[3], label=r"$|c_-(t)|^2$")
    ax1.legend(loc="lower right")
    ax1.set_xlabel(r"$t$")
    m = [(abs(x)**2).max() for x in Psi, cp, cm]
    ax1.set_ylim(1e-3, max(m))

    omega, g = OM.get_cycle_parameters(t)
    # np.savetxt("parameters_{}.dat".format(fignum), zip(g, omega))

    ax2 = plt.axes([0.2, 0.65, .2, .2])
    ax2.plot(gamma/2, 0, "ko")
    ax2.plot(g, omega, ls="-", color=colors[0])
    ax2.set_xlim(gamma/4, 3/4.*gamma)
    ax2.set_ylim(-gamma/4., gamma/4.)

    if show:
        plt.show()
    else:
        plt.savefig("{}.png".format(fignum))


if __name__ == '__main__':
    print "Warning: is normalization symmetric?"

    import argh
    argh.dispatch_command(plot_figures)
Code example #34
File: steady_state.py Project: bjodah/chemreac
            c = (1.0-c, .5-c/2, .5-c/2)  # over time: dark red -> light red

            plt.subplot(4, 1, 1)
            _plot(Cout[i, :, 0], c, 'Simulation (N={})'.format(rd.N),
                  apply_exp_on_y=logy)

            plt.subplot(4, 1, 2)
            _plot(Cref[i, :, 0], c, 'Analytic', apply_exp_on_y=logy)

            ax_err = plt.subplot(4, 1, 3)
            plot_solver_linear_error(integr, Cref, ax_err, ti=i,
                                     bi=slice(None),
                                     color=c, fill=(i == 0))
            plt.title('Linear rel error / Log abs. tol. (={})'.format(
                      info['atol']))

        plt.subplot(4, 1, 4)
        tspan = [tout[0], tout[-1]]
        plt.plot(tout, rmsd[:, 0] / info['atol'], 'r')
        plt.plot(tspan, [ave_rmsd_over_atol[0]]*2, 'r--')

        plt.xlabel('Time / s')
        plt.ylabel(r'$\sqrt{\langle E^2 \rangle} / atol$')
        plt.tight_layout()
        plt.show()
    return tout, Cout, info, ave_rmsd_over_atol, rd


if __name__ == '__main__':
    argh.dispatch_command(integrate_rd, output_file=None)
Code example #35
File: rl_runner.py Project: Pandolph/minigo
import subprocess

import argh
from utils import timer

BUCKET_NAME = os.environ['BUCKET_NAME']


def loop(working_dir='estimator_working_dir'):
    """Run train and validate as subprocesses."""
    flags = [
        '--working_dir',
        working_dir,
        '--bucket_name',
        BUCKET_NAME,
    ]
    while True:
        print("==================================")
        with timer("Train"):
            train = subprocess.call(['python', 'rl_loop.py', 'train'] + flags)
            if train != 0:
                print("Skipping validation")
                continue

        with timer("validate"):
            subprocess.call(['python', 'rl_loop.py', 'validate'] + flags)


if __name__ == '__main__':
    argh.dispatch_command(loop)
Code example #36
    ax2.set_xlim(-0.85, 0.85)
    ax2.set_ylim(-1.15, 1.05)
    ax2.set_xlabel(r"$p_1$")
    ax2.set_ylabel(r"$p_2$")
    ax2.locator_params(axis='x', nbins=4)
    ax2.locator_params(axis='y', nbins=4)

    for ax in (ax1, ax2):
        ax.spines['right'].set_visible(False)
        ax.spines['top'].set_visible(False)
        ax.xaxis.set_ticks_position('bottom')
        ax.yaxis.set_ticks_position('left')
        ax.get_xaxis().set_tick_params(direction='out')
        ax.get_yaxis().set_tick_params(direction='out')

    ax1.annotate('a', (1.1, 0.1), textcoords='data',
                 weight='bold', size=12, color='black')
    ax2.annotate('b', (0.7, 1.0), textcoords='data',
                 weight='bold', size=12, color='black')

    plt.tight_layout()
    if show:
        plt.show()
    else:
        # plt.savefig("Fig2_alt.png", bbox_inches='tight')
        plt.savefig("Fig2_alt.pdf", bbox_inches='tight')


if __name__ == '__main__':
    argh.dispatch_command(plot_parameter_trajectory_p1_p2)
Code example #37
File: reindex.py Project: seut/cr8
def main():
    argh.dispatch_command(reindex)
Code example #38
                         np.linspace(y_min, y_max, grid_size),
                         indexing='ij')
    flatten = lambda m: np.array(m).reshape(-1, )

    result = []
    for (i, j) in itertools.product(range(grid_size), range(grid_size)):
        point = np.array([xx[i, j], yy[i, j]]).reshape(1, 2)
        result.append(predictor.predict(point))

    Z = np.array(result).reshape(xx.shape)

    plt.contourf(xx,
                 yy,
                 Z,
                 cmap=cm.Paired,
                 levels=[-0.001, 0.001],
                 extend='both',
                 alpha=0.8)
    plt.scatter(flatten(X[:, 0]),
                flatten(X[:, 1]),
                c=flatten(y),
                cmap=cm.Paired)
    plt.xlim(x_min, x_max)
    plt.ylim(y_min, y_max)
    plt.savefig(filename)


if __name__ == "__main__":
    logging.basicConfig(level=logging.ERROR)
    argh.dispatch_command(example)
Code example #39
            np.savez("potential_imag_xy.npz", X=X, Y=Y, P=imag_vector,
                     X_nodes=p.xnodes, Y_nodes=p.ynodes,
                     sigmax=sigmax, sigmay=sigmay)

    if shape == 'RAP':
        xi_lower, xi_upper = p.WG.get_boundary(theta=theta, smearing=smearing,
                                               boundary_phase=boundary_phase)
        # set last element to 0 (xi_lower) or W (xi_upper)
        print "WARNING: end of boundary not set zero!"
        # xi_lower[-1] = 0.0
        # xi_upper[-1] = W
        np.savetxt("upper.boundary", zip(xrange(p.nx), xi_upper))
        np.savetxt("lower.boundary", zip(xrange(p.nx), xi_lower))
        eps, delta = p.WG.get_cycle_parameters()
        np.savetxt("boundary.eps_delta", zip(eps, delta))
    if shape == 'RAP_TQD':
        eps_prime, delta_prime, theta_prime = p.WG.get_quantum_driving_parameters()
        xi_lower, xi_upper = p.WG.get_boundary(eps=eps_prime, delta=delta_prime,
                                               theta=theta_prime,
                                               smearing=smearing)
        # set last element to 0 (xi_lower) or W (xi_upper)
        xi_lower[-1] = 0.0
        xi_upper[-1] = W
        np.savetxt("upper.boundary", zip(xrange(p.nx), xi_upper))
        np.savetxt("lower.boundary", zip(xrange(p.nx), xi_lower))
        np.savetxt("boundary.eps_delta_theta", zip(eps_prime, delta_prime, theta_prime))


if __name__ == '__main__':
    argh.dispatch_command(write_potential)
Code example #40
def main():
    argh.dispatch_command(find_perf_regressions)
Code example #41
File: van_der_pol.py Project: bjodah/pycvodes
    f, j = get_f_and_j(mu)
    if nt > 1:
        tout = np.linspace(t0, tend, nt)
        yout, nfo = integrate_predefined(
            f, j, [u0, v0], tout, dt0, atol, rtol, nsteps=nsteps,
            check_indexing=False, method=method)
    else:
        tout, yout, nfo = integrate_adaptive(
            f, j, [u0, v0], t0, tend, dt0, atol, rtol, nsteps=nsteps,
            check_indexing=False, method=method)  # dfdt[:] also for len == 1
    if verbose:
        print(nfo)
    if plot:
        import matplotlib.pyplot as plt
        plt.plot(tout, yout[:, 1], 'g--')
        plt.plot(tout, yout[:, 0], 'k-', linewidth=2)
        if savefig == 'None':
            plt.show()
        else:
            plt.savefig(savefig, dpi=dpi)


if __name__ == '__main__':
    try:
        import argh
        argh.dispatch_command(integrate_ivp)
    except ImportError:
        import warnings
        warnings.warn("Ignoring parameters, install argh to fix.")
        integrate_ivp()
Code example #42
    # Find experiment log
    experiment_base = onlyone(data_dir.glob(f"**/" + session['log_folder']))

    #print(experiment_log)
    #print(pupil_base)
    #print(export_file)
    #
    return sync_and_merge_session(export_file, pupil_base, experiment_base)


def sync_and_merge(output):
    data = []
    for pid in session_index:
        print("Processing participant", pid)
        d = sync_and_merge_participant(pid)
        d['participant_id'] = int(pid)
        data.append(d)

    data = pd.concat(data)
    data.to_parquet(output)


if __name__ == '__main__':
    import argh
    import sys
    #argh.dispatch_command(sync_and_merge_participant)
    argh.dispatch_command(sync_and_merge)
    #argh.dispatch_command(get_marker_positions)
    #get_marker_positions(sys.argv[1])
Code example #43
    memratio = kwargs['memratio']
    confpath = kwargs['confpath']
    avgmem = kwargs['avgmem']
    
    LOGGER.info("Memory slice or ratio to be used by PHP-FPM is %s (%s%% of the total memory available.)", memratio, '%d' % (memratio * 100))
    LOGGER.info("Calculating max_children for a %sM (in average) memory footprint per thread...", avgmem)

    max_children = 5
    
    if avgmem > 0:
        max_children = get_php_fpm_memory(memratio) / avgmem
        if max_children < 5:
            LOGGER.warning("Calculated max_children value of %s is too low. Try to lower the PHP application's memory footprint, or consider adding more memory to the box.", '%d' % max_children)
    
    conf_file = fileinput.FileInput(confpath, inplace=True)
    LOGGER.info("Writing pm.max_children value of %s to %s...", '%d' % max_children, confpath)
    
    for line in conf_file:
        line = re.sub(r"^pm.max_children = .*$", 'pm.max_children = ' + '%d' % max_children + ' ;Modified by /opt/beamly/scripts/php/fpm_dimensioning.py (upstart)' , line.rstrip())
        print(line)
    LOGGER.info("Done dimensioning pm.max_children.")

if __name__ == "__main__":
    FORMAT = "%(asctime)-15s : %(levelname)-8s : %(message)s"
    logging.basicConfig(format=FORMAT, level=logging.INFO)

    # overwrite config file
    argh.ArghParser()
    argh.dispatch_command(write_max_children_config)
Code example #44
File: enmako.py Project: bjodah/chemreac
    e.g. '{"title": "Welcome"}'
    or a shell command (shell_cmd_subs) which outputs lines of form:
    title=Welcome. If outpath is not given, template_path will be stripped
    from trailing `.mako` (required in that case)

    Note: json does not support integer keys in dicts
    """
    if outpath is None:
        assert template_path.endswith('.mako')
        outpath = template_path[:-5]
    subsd = {}
    if gen_subsd_eval:
        subsd.update(eval(gen_subsd_eval))
    if json_subs:
        import json
        subsd.update(json.load(open(json_subs, 'rt')))
    if pickle_subs:
        try:
            import cPickle as pickle
        except ImportError:
            import pickle
        subsd.update(pickle.load(open(pickle_subs, 'rt')))
    if shell_cmd_subs:
        import subprocess
        outp = subprocess.check_output(shell_cmd_subs.split())
        subsd.update(dict([x.split('=') for x in outp.split('\n')[:-1]]))
    render_mako_template_to(template_path, outpath, subsd)

if __name__ == '__main__':
    argh.dispatch_command(enmako)
Code example #45
def main():
    argh.dispatch_command(run_spec)
Code example #46
File: insert_fake_data.py Project: mfussenegger/cr8
def main():
    argh.dispatch_command(insert_fake_data)
Code example #47
        ax.set_xlabel("epsilon")
        ax.set_ylabel("delta")
        if limits:
            print limits
            ax.set_xlim(limits[0], limits[1])
            ax.set_ylim(limits[2], limits[3])
        else:
            ax.set_xlim(eps.min(), eps.max() + 2*xoffset)
            ax.set_ylim(delta.min(), delta.max() + 2*yoffset)

        f.colorbar(im, ax=ax)

        plt.setp(ax.xaxis.get_majorticklabels(), rotation=45)
        plt.subplots_adjust(top=0.875)

        if trajectory:
            # n, eps, delta = np.loadtxt(trajectory, unpack=True)[:3]
            eps, delta = np.loadtxt(trajectory, unpack=True)[:2]
            plt.plot(eps, delta, "r-")
            # for n, (eps, delta) in enumerate(zip(eps, delta)):
            #     plt.text(eps, delta, str(n), fontsize=12)

        if png:
            plt.savefig(png)
        else:
            plt.show()


if __name__ == '__main__':
    argh.dispatch_command(plot_3D_spectrum)
Code example #48
    """reconstruct from channel data
    """
    opts = loadOptions(opts_path)
    # normalize paths according to the platform
    opts['extra']['src_dir'] =\
        os.path.expanduser(os.path.normpath(opts['extra']['src_dir']))
    opts['extra']['dest_dir'] =\
        os.path.expanduser(os.path.normpath(opts['extra']['dest_dir']))
    # load data from hdf5 files
    ind = opts['load']['EXP_START']
    if opts['load']['EXP_END'] != -1 and\
       opts['load']['EXP_END'] != ind:
        notifyCli('WARNING: multiple experiments selected. '
                  'Only the first dataset will be processed')
    chn_data, chn_data_3d = load_hdf5_data(
        opts['extra']['dest_dir'], ind)
    if opts['unpack']['Show_Image'] != 0:
        notifyCli('Currently only Show_Image = 0 is supported.')
    # initialize pyCuda environment
    cuda.init()
    dev = cuda.Device(0)
    ctx = dev.make_context()
    reImg = reconstruction_3d(chn_data_3d, opts['recon'])
    ctx.pop()
    del ctx
    save_reconstructed_image(reImg, opts['extra']['dest_dir'],
                             ind, 'tiff', '_3d')

if __name__ == '__main__':
    argh.dispatch_command(reconstruct)
Code example #50
  fig5 = fig.add_subplot(235, projection='3d')
  fig5.plot_surface(xx, yy, zzAreaI, rstride=5, cstride=5, cmap=IMap, alpha=0.5,zorder=11.0,vmin=zzAreaI.min(), vmax=zzAreaI.max())
 # fig5.contourf(xx, yy, zzAreaI, zdir='z', offset=zmin, cmap=IMap, vmin=-1, vmax=1,zorder=1.0)
  fig5.set_zlim(zzAreaI.min(),zzAreaI.max())
  fig5.set_title("e) Area under Im$(e^{-i\Phi_0}A_0(t-\Delta t)+A_1(t))$",fontsize=fs)
  fig5.set_ylabel("$\Delta t$ in ns",fontsize=fs)
  fig5.set_xlabel("$\Phi_0/\pi$",fontsize=fs)

  fig3 = fig.add_subplot(236, projection='3d')
  fig3.plot_surface(xx, yy, zzArea2, rstride=5, cstride=5, cmap=AMap, alpha=0.5,zorder=11.0,vmin=zzArea2.min(), vmax=zzArea2.max())
#  fig3.contourf(xx, yy, zzArea2, zdir='z', offset=zmin, cmap=AMap, vmin=0, vmax=1,zorder=1.0)
  fig3.set_zlim(zzArea2.min(),zzArea2.max())
  fig3.set_title("f) Area under $|e^{-i\Phi_0}A_0(t-\Delta t)+A_1(t)|^2$",fontsize=fs)
  fig3.set_ylabel("$\Delta t$ in ns",fontsize=fs)
  fig3.set_xlabel("$\Phi_0/\pi$",fontsize=fs)

  plt.show()

  ### plotting


### main routine #########################################################################
##########################################################################################


if __name__ == '__main__':
    argh.dispatch_command(main_routine)  



Code example #51
File: cli.py Project: cscutcher/tmux_cssh
def tmux_cssh():
    '''
    CLI Dispatcher for tmux_ssh
    '''
    argh.dispatch_command(clustered_ssh)
Code example #52
File: __main__.py Project: ekimekim/factoriocalc
import logging
from factoriocalc.main import main

import argh

logging.basicConfig(level=logging.DEBUG)
argh.dispatch_command(main)
Code example #53
File: cli.py Project: cscutcher/tmux_cssh
def tmux_cluster():
    '''
    CLI dispatcher for tmux_cluster
    '''
    argh.dispatch_command(clustered_window)
Code example #54
File: theme2pak.py Project: smedstadc/theme2pak
def get_pak_line_from_theme_line(line, base_path):
    logger.debug("in get_pak_line_from_theme_line with line={}, base_path={}".format(repr(line), repr(base_path)))

    if line is not None and line[0] == '!':
        emote_filename = line.split()[1]
        emote_bbcode = line.split()[2]
        emote_size = get_emote_image_dimensions(os.path.join(base_path, emote_filename))

        if emote_size:
            emote_width, emote_height = get_emote_image_dimensions(os.path.join(base_path, emote_filename))
            return "'{0}', '{1}', '{2}', '0', '{3}', '{4}',\n".format(emote_filename, emote_width,
                                                                      emote_height, emote_filename.split('.')[0],
                                                                      emote_bbcode)
        else:
            return None


def get_emote_image_dimensions(path):
    logger.debug("in get_emote_image_dimensions with path={}".format(repr(path)))
    try:
        with Image.open(path) as emote_image:
            return emote_image.size
    except IOError as e:
        logger.error("Problem reading image {}".format(repr(path)))
    except Exception as e:
        logger.error(e)


if __name__ == "__main__":
    argh.dispatch_command(convert_theme)
Code example #55
            plotter(name)(f)(df)

    if run_trellis:
        for name, f in TRELLIS:
            plotter(name)(f)(df)


def line_plot(df, xaxis):
    logging.info("Slice plot of \n%s", df.describe())
    for key, group in df.groupby('algorithm'):
        plt.plot(group[xaxis], group['min_time_ns'], label=key)
    plt.xlabel(xaxis)
    plt.ylabel('Time (ns)')
    plt.legend(loc='best')


def trellis_plot(df):
    logging.info("Trellis plot of \n%s", df.describe())
    plot = rplot.RPlot(df, x='num_trees', y='min_time_ns')
    plot.add(rplot.TrellisGrid(['num_features', 'depth']))
    plot.add(rplot.GeomPoint(
        colour=rplot.ScaleRandomColour('algorithm'),
        alpha=0.8,
        size=50))
    plot.render(plt.gcf())


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    argh.dispatch_command(plots)
Code example #56
        - Concrete version like "0.55.0" or with wildcard: "1.1.x"
        - An alias (one of [latest-nightly, latest-stable, latest-testing])
        - A URI pointing to a CrateDB tarball (in .tar.gz format)
        - A URI pointing to a checked out CrateDB repo directory

    run-crate supports command chaining. To launch a CrateDB node and another
    sub-command use:

        cr8 run-crate <ver> -- timeit -s "select 1" --hosts '{node.http_url}'

    To launch any (blocking) subprocess, prefix the name with '@':

        cr8 run-crate <version> -- @http '{node.http_url}'

    If run-crate is invoked using command chaining it will exit once all
    chained commands finished.

    The postgres host and port are available as {node.addresses.psql.host} and
    {node.addresses.psql.port}
    """
    with create_node(version, env, setting, crate_root, keep_data) as n:
        try:
            n.start()
            n.process.wait()
        except KeyboardInterrupt:
            print('Stopping Crate...')


if __name__ == "__main__":
    argh.dispatch_command(run_crate)
Code example #57
            Chi_0_eff_0 = np.outer(Chi_0_eff[:,0], np.sin(np.pi*y))
            Chi_0_eff_1 = np.outer(Chi_0_eff[:,1]*np.exp(-1j*WGn.kr*xnn),
                                np.sqrt(WGn.k1/WGn.k0)*np.sin(2*np.pi*y))
            Chi_0_eff = Chi_0_eff_0 + Chi_0_eff_1

            Chi_1_eff_0 = np.outer(Chi_1_eff[:,0], np.sin(np.pi*y))
            Chi_1_eff_1 = np.outer(Chi_1_eff[:,1]*np.exp(-1j*WGn.kr*xnn),
                                np.sqrt(WGn.k1/WGn.k0)*np.sin(2*np.pi*y))
            Chi_1_eff = Chi_1_eff_0 + Chi_1_eff_1

        Chi_0_eff, Chi_1_eff = [ c.T for c in Chi_0_eff, Chi_1_eff ]

        ID = "n_{:03}_xn_{:08.4f}_epsn_{}_deltan_{}".format(n, xn, epsn, deltan)
        print ID, WGn.kr
        part = np.abs
        f, (ax1, ax2) = plt.subplots(nrows=2)

        ax1.pcolormesh(X, Y, part(Chi_0_eff), vmin=np.abs(Chi_0_eff).min(), vmax=np.abs(Chi_0_eff).max())
        ax1.contour(X, Y, np.real(Chi_0_eff), levels=[0.0], colors='k', linestyles="solid")
        ax1.contour(X, Y, np.imag(Chi_0_eff), levels=[0.0], colors='w', linestyles="dashed")

        ax2.pcolormesh(X, Y, part(Chi_1_eff), vmin=np.abs(Chi_1_eff).min(), vmax=np.abs(Chi_1_eff).max())
        ax2.contour(X, Y, np.real(Chi_1_eff), levels=[0.0], colors='k', linestyles="solid")
        ax2.contour(X, Y, np.imag(Chi_1_eff), levels=[0.0], colors='w', linestyles="dashed")

        plt.savefig(ID + ".png")


if __name__ == '__main__':
    argh.dispatch_command(get_loop_eigenfunction)