def rapids_time(chord, time):
    """
    For rapid play, create a rapid passage based on the number of notes in the chord.
    """
    # Random tempo between 60 and 100 BPM; one event per note in the chord.
    bpm = random() * 40. + 60.
    tuplet = len(chord)
    time_between_events = 60. / (bpm * tuplet)
    time_total = tuplet * time_between_events
    # Stretch the allotted time if the passage does not fit.
    if time_total > time:
        time = time_total
    event_times = frange(0., time_total, time_between_events)
    return (event_times, time)
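# These snippets call a float-valued range helper (frange, common.frange,
# cmn.frange) that is not shown here. Below is a minimal sketch of such a
# helper, assuming start-inclusive / stop-exclusive semantics with a fixed
# step size; note the QAOA snippet further down appears to pass a number of
# steps as the third argument instead, so its variant differs.
import math

def frange(start, stop, step, include_start=True):
    """Yield floats from start up to (but excluding) stop in increments of step."""
    n = int(math.ceil((stop - start) / step))
    first = 0 if include_start else 1
    for i in range(first, n):
        # Multiply rather than accumulate to limit floating-point drift.
        yield start + i * step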
def fit(self, X, y, min_h=0.1, max_h=2., step_h=0.15):
    """Grid-search the bandwidth h that minimises the leave-one-out (LOO) error."""
    LOO_min = self._loo(X=X, y=y, h=self.__h)
    h_opt = self.__h
    res_h = []
    res_loo = []
    for h in cmn.frange(min_h, max_h, step_h):
        LOO = self._loo(X=X, y=y, h=h)
        res_h.append(h)
        res_loo.append(LOO)
        if LOO < LOO_min:
            LOO_min = LOO
            h_opt = h
    self.__h = h_opt
    return {'h_opt': h_opt, 'h': res_h, 'LOO': res_loo}
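# Hypothetical usage of the grid search above, assuming it is a method of a
# kernel-smoothing estimator (here called KernelRegression) whose _loo(X, y, h)
# returns the leave-one-out error for bandwidth h:
#
#     model = KernelRegression(h=1.0)
#     result = model.fit(X_train, y_train, min_h=0.1, max_h=2.0, step_h=0.15)
#     print(result['h_opt'])          # bandwidth with the lowest LOO error
#     # result['h'] and result['LOO'] give the full error curve over the grid.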
def main(args):
    print('')
    print('working on: {}'.format(args.graph))
    graph = common.load_graph(args.graph)
    print('graph ({},{})'.format(len(graph.nodes), len(graph.edges)))

    if args.remap:
        print('')
        print('remapping:')
        graph, mapping = common.remap(graph)
        for k, v in mapping.items():
            print(' {} -> {}'.format(v, k))

    print('rounds: {}'.format(args.rounds))
    print('steps: {}'.format(args.steps))
    print('shots: {}'.format(args.shots))

    # Candidate beta and gamma angles for the grid search (0 excluded).
    beta_vals = [
        value for value in common.frange(
            0.0, args.sample_range_scale * pi, args.steps, include_start=False)
    ]
    gamma_vals = [
        value for value in common.frange(
            0.0, args.sample_range_scale * 2.0 * pi, args.steps, include_start=False)
    ]
    print('beta: {}'.format(beta_vals))
    print('gamma: {}'.format(gamma_vals))

    # One beta and one gamma parameter per QAOA round.
    names = []
    values = {}
    for r in range(args.rounds):
        bn = beta_template.format(r)
        names.append(bn)
        values[bn] = beta_vals
        gn = gamma_template.format(r)
        names.append(gn)
        values[gn] = gamma_vals

    best_config = None
    best_config_value = 0

    # Exhaustive search over all angle configurations.
    for config in common.dfs(names, values, {}):
        #print(config)
        num_bits = graph.max_node
        qp = QuantumProgram()
        qr = qp.create_quantum_register('qr', num_bits)
        cr = qp.create_classical_register('cr', num_bits)
        qc = qp.create_circuit('qaoa', [qr], [cr])

        # Start from the uniform superposition.
        for i in range(num_bits):
            qc.h(qr[i])

        for r in range(args.rounds):
            beta = config[beta_template.format(r)]
            gamma = config[gamma_template.format(r)]

            # Mixer layer.
            for i in range(num_bits):
                qc.u3(2 * beta, -pi / 2, pi / 2, qr[i])

            # Cost (phase-separation) layer, one block per edge.
            for e in graph.edges:
                qc.x(qr[e.fr])
                qc.u1(-gamma / 2.0, qr[e.fr])
                qc.x(qr[e.fr])
                qc.u1(-gamma / 2.0, qr[e.fr])
                qc.cx(qr[e.fr], qr[e.to])
                qc.x(qr[e.to])
                qc.u1(gamma / 2.0, qr[e.to])
                qc.x(qr[e.to])
                qc.u1(-gamma / 2.0, qr[e.to])
                qc.cx(qr[e.fr], qr[e.to])

        qc.measure(qr, cr)

        result = qp.execute(['qaoa'], backend='local_qasm_simulator',
                            shots=args.shots)

        # Show the results
        #print(result)
        data = result.get_data('qaoa')
        #print(data['counts'])
        ec = common.expected_cut(graph, data['counts'])
        #print(ec)
        #print(result.get_ran_qasm('qaoa'))

        if ec > best_config_value:
            best_config = config
            best_config_value = ec
            print('')
            print('new best: {}'.format(best_config))
            print('expected cut: {}'.format(best_config_value))
            print('counts: {}'.format(data['counts']))
        else:
            sys.stdout.write('.')
            sys.stdout.flush()

    #print(nodes)
    #print(edges)

    # print_err('loading: {}'.format(args.sample_data))
    # with open(args.sample_data) as file:
    #     data = json.load(file)
    #     for solution_data in data['solutions']:
    #         row = [solution_data['num_occurrences']] + solution_data['solution']
    #         print(', '.join([str(x) for x in row]))

    json_config = {
        'steps': args.steps,
        'expected_cut': best_config_value,
        'rounds': []
    }
    rounds = json_config['rounds']
    # Record the per-round angles of the best configuration found.
    for r in range(args.rounds):
        beta = best_config[beta_template.format(r)]
        gamma = best_config[gamma_template.format(r)]
        rounds.append({'beta': beta, 'gamma': gamma})

    config_file = args.graph.replace('.qx', '_config_{:02d}.json'.format(args.rounds))
    print('write: {}'.format(config_file))
    with open(config_file, 'w') as file:
        file.write(json.dumps(json_config, **common.json_dumps_kwargs))
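# Circuit note for main() above: u3(2*beta, -pi/2, pi/2) acts as Rx(2*beta) =
# exp(-i*beta*X), the standard QAOA mixer. For each edge (i, j), the X/u1/CX
# block applies exp(-i*gamma*(1 - Z_i*Z_j)/2), the phase-separation unitary for
# the MaxCut cost term (1 - Z_i*Z_j)/2: the two u1(-gamma/2) gates wrapped in X
# on qr[e.fr] contribute a constant phase exp(-i*gamma/2), and conjugating
# exp(i*gamma*Z/2) on qr[e.to] by CX(fr, to) yields exp(i*gamma*Z_i*Z_j/2).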
def draw_loop(parser, start, end, step, x_exp, y_exp):
    # Sweep the curve parameter T from start to end (inclusive) and plot each point.
    for i in common.frange(start, end + step, step):
        parser.param['T'] = i
        x, y = calc_coord(x_exp, y_exp, parser.scale_x, parser.scale_y,
                          parser.rot_angle, parser.origin_x, parser.origin_y)
        draw_pixel(parser, x, y)
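# Hypothetical call for the parametric drawing loop above, assuming calc_coord
# evaluates the expression strings with the current value of parser.param['T']
# (parser, calc_coord and draw_pixel come from the surrounding module):
#
#     draw_loop(parser, 0.0, 2 * math.pi, 0.01, 'cos(T)', 'sin(T)')  # unit circle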
def submit_mc_jobs_htcondor(in_args=sys.argv[1:], log_dir=LOG_DIR):
    """
    Main function. Sets up all the relevant directories, makes condor job and
    DAG files, then submits them if necessary.
    """
    # Handle user options. The user can pass all the same options as they
    # would if running the program locally. However, for certain options,
    # we intercept the argument(s) and modify them if necessary.
    parser = argparse.ArgumentParser(description=__doc__)

    # Options for the job submitter
    parser.add_argument("jobIdRange",
                        help="Specify job ID range to run over. The ID is used "
                        "as the random number generator seed, so manual "
                        "control is needed to avoid making the same files. "
                        "Must be of the form: startID, endID.",
                        nargs=2, type=int)  # no metavar, bug with positional args
    parser.add_argument("--oDir",
                        help="Directory for output HepMC files. "
                        "If no directory is specified, an automatic one will "
                        "be created at: "
                        "/hdfs/user/<username>/NMSSMPheno/Pythia8/<energy>TeV/<card>/<date>",
                        default="")
    parser.add_argument("--exe",
                        help="Executable to run.",
                        default="generateMC.exe")
    parser.add_argument("--massRange",
                        help="Specify mass range to run over. "
                        "Must be of the form: startMass, endMass, massStep. "
                        "For each mass point, njobs jobs will be submitted. "
                        "This will supersede any --mass option passed via --args",
                        nargs=3, type=float,
                        metavar=('startMass', 'endMass', 'massStep'))
    # All other program arguments to pass to program directly.
    parser.add_argument("--args",
                        help="All other program arguments. "
                        "You MUST specify this after all other options",
                        nargs=argparse.REMAINDER)
    # Some generic script options
    parser.add_argument("--dry",
                        help="Dry run, don't submit to queue.",
                        action='store_true')
    parser.add_argument("-v",
                        help="Display debug messages.",
                        action='store_true')
    args = parser.parse_args(args=in_args)

    log.info('>>> Creating jobs')
    if args.v:
        log.setLevel(logging.DEBUG)
    log.debug('program args: %s', args)

    # Do some sanity checks
    check_args(args)

    # Get the input card from user's options & check it exists
    try:
        card = get_option_in_args(args.args, "--card")
    except KeyError:
        log.exception('You did not specify an input card!')
        card = None
    if not card:
        raise RuntimeError('You did not specify an input card!')
    if not os.path.isfile(card):
        raise RuntimeError('Input card %s does not exist!' % card)
    args.card = card
    args.channel = os.path.splitext(os.path.basename(card))[0]

    # Make sure output is zipped
    if '--zip' not in args.args:
        args.args.append("--zip")

    # Get CoM energy
    try:
        args.energy = int(get_option_in_args(args.args, '--energy'))
    except KeyError:
        args.energy = 13

    # Loop over required mass(es), generating DAG files for each
    if args.massRange:
        masses = common.frange(args.massRange[0], args.massRange[1], args.massRange[2])
    else:
        masses = [get_option_in_args(args.args, '--mass')]

    status_files = []

    for mass in masses:
        # Auto generate output directory if necessary
        if args.oDir == "":
            args.oDir = generate_dir_soolin(args.channel, args.energy, mass)
            log.info('Auto setting output dir to %s', args.oDir)

        # Setup log directory
        log_dir = '%s/%s/logs' % (log_dir, generate_subdir(args.channel, args.energy, mass))

        # File stem common for all dag and status files
        file_stem = '%s/py8_%s' % (generate_subdir(args.channel, args.energy, mass),
                                   strftime("%H%M%S"))

        # Make DAGMan
        status_name = file_stem + '.status'
        status_files.append(status_name)
        pythia_dag = create_dag(dag_filename=file_stem + '.dag',
                                condor_filename='HTCondor/pythia.condor',
                                status_filename=status_name,
                                log_dir=log_dir,
                                mass=mass,
                                args=args)

        # Submit it
        if args.dry:
            log.warning('Dry run - not submitting jobs or copying files.')
            pythia_dag.write()
        else:
            pythia_dag.submit()

    if len(status_files) > 1:
        log.info('Check all statuses with:')
        log.info('DAGstatus.py %s', ' '.join(status_files))

    return 0
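# Hypothetical invocation of the submitter above (script name and card path are
# examples); the two positional values are jobIdRange, and everything after
# --args is forwarded to the generator executable:
#
#     python submit_mc_jobs_htcondor.py 1 50 --massRange 100 150 5 \
#         --args --card input_cards/my_card.cmnd --energy 13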