Пример #1
0
    def __init__(self, image_size):
        """Set up generators, discriminators, losses, and optimizers on GPU.

        Args:
            image_size: input resolution handed to both discriminators.
        """
        # two generators (forward/backward mappings) and two discriminators:
        # one judging 3-channel color output, one judging 1-channel texture
        self.generator_g = Generator().cuda()
        self.generator_f = Generator().cuda()
        self.discriminator_c = Discriminator(image_size,
                                             num_input_channels=3).cuda()
        self.discriminator_t = Discriminator(image_size,
                                             num_input_channels=1).cuda()

        self.content_criterion = ContentLoss().cuda()
        self.tv_criterion = TVLoss().cuda()
        self.color_criterion = nn.BCEWithLogitsLoss().cuda()
        self.texture_criterion = nn.BCEWithLogitsLoss().cuda()

        betas = (0.5, 0.999)

        def adam(module, lr):
            # shared Adam factory; discriminator LR (4e-4) is 4x generator LR (1e-4)
            return optim.Adam(lr=lr, params=module.parameters(), betas=betas)

        self.g_optimizer = adam(self.generator_g, 1e-4)
        self.f_optimizer = adam(self.generator_f, 1e-4)
        self.c_optimizer = adam(self.discriminator_c, 4e-4)
        self.t_optimizer = adam(self.discriminator_t, 4e-4)

        self.blur = GaussianBlur().cuda()
        self.gray = Grayscale().cuda()
Пример #2
0
    def draw(self, ax, time=None, cycles=1, **kwargs):
        """Draw the signal's wave shape on *ax*.

        When *time* is omitted, a window covering *cycles* periods of the
        signal is drawn and the sampling density is scaled to match.
        """
        if time is None:
            time = cycles / np.float64(self.freq)
            # scale plot density with the cycle count (default 100 per cycle)
            kwargs['density'] = kwargs.get('density', 100) * cycles

        Generator.draw(self, ax, time, **kwargs)
Пример #3
0
def wikenheiser2015_lookahead(goal_no,
                              start,
                              alpha,
                              tau,
                              goal_weight=20.,
                              goal_absorb=True,
                              backmove=False,
                              opt_policy_weight=0.9,
                              sigma=0.5,
                              n_samp=n_samp,
                              n_step=n_step):
    """Sample look-ahead trajectories on a goal-directed circular track.

    Builds the environment/generator/propagator/explorer stack, samples
    n_samp sequences of n_step steps, and configures the explorer's
    plotting attributes.

    Returns:
        (explorer, propagator, generator, environment) tuple.
    """
    env = CircularTrack(n_state=n_state_circ,
                        start=start,
                        goal_no=goal_no,
                        goal_weight=goal_weight,
                        goal_absorb=goal_absorb,
                        backmove=backmove,
                        opt_policy_weight=opt_policy_weight)
    gen = Generator(ENV=env, symmetrize=symmetrize, jump_rate=jump_rate)
    prop = Propagator(GEN=gen, sigma=sigma, tau=tau, alpha=alpha)
    explorer = Explorer(PROP=prop, rho_init=start, no_dwell=no_dwell)
    explorer.sample_sequences(n_samp=n_samp, n_step=n_step)
    # visualization attributes used by downstream plotting code
    explorer.traj_width = 0
    explorer.start_pos = True
    explorer.state_msize = state_msize
    return explorer, prop, gen, env
Пример #4
0
def main():
    """Generate a dataset in parallel worker processes.

    Loads a YAML config, prepares output directories, splits the requested
    number of sample ids evenly across workers, runs one Process per worker,
    and finally writes the (possibly seeded) config back next to the output.
    """
    import numpy as np
    options = parse_options()
    os.makedirs(options.dir, exist_ok=True)
    config_logger(options.dir)

    with open(options.config, 'r') as stream:
        try:
            opt = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
            logging.exception(exc)
            # exit(1) relies on the `site` module's interactive helper;
            # raising SystemExit directly is equivalent and always available
            raise SystemExit(1)

    gt_dir = f"{options.dir}/original"
    data_dir = f"{options.dir}/spoiled"
    export_dir = f"{options.dir}/export"
    os.makedirs(data_dir, exist_ok=True)
    os.makedirs(gt_dir, exist_ok=True)
    os.makedirs(export_dir, exist_ok=True)

    # persist a fresh seed (and worker count) so a rerun can reproduce output
    seed = opt.get('seed', None)
    if not seed:
        seed = random.randint(0, 2**32 - 1)
        opt['seed'] = seed
        opt['n_workers'] = options.workers
    logging.info(f"Starting generation with seed {seed} ")
    n_workers = opt.get('n_workers', None)
    n_workers = n_workers or options.workers  # ensure determinism

    # Pad the id range so it divides evenly across workers.
    # BUGFIX: the previous `n_workers - (options.size % n_workers)` padded a
    # full extra row of None when size was already divisible by n_workers;
    # `-size % n_workers` yields 0..n_workers-1, the minimal padding.
    filler = [None] * (-options.size % n_workers)
    vals = np.concatenate([np.arange(options.size),
                           filler]).reshape(-1, n_workers).transpose()
    vals = [[
        f'{options.out_name}_{val:04}' for val in values if val is not None
    ] for values in vals]
    processes = []

    random.seed(seed)
    pbar = ProgressBar(options.size)

    def update_pbar(item):
        # progress callback invoked by workers
        pbar.update(f"Generated {item}")

    for i in range(n_workers):
        # derive a per-worker seed so random streams do not collide
        seed = random.randint(0, 2**32 - 1)
        generator = Generator(opt)
        spoiler = Spoiler()
        exporters = from_options(opt, export_dir)
        visitors = [spoiler] + exporters
        p = Process(target=gen_image_pool,
                    args=(generator, visitors, vals[i], options, seed,
                          update_pbar))
        p.start()
        processes.append(p)
    for p in processes:
        p.join()

    with open(f"{options.dir}/config.yml", 'w') as f:
        yaml.dump(opt, f)
Пример #5
0
def photos_generator():
    """Build the photo-batch Generator backed by a preprocessing pipeline.

    Wires several named pipelines (photo stats, listing origin, interest
    labels, one-hot response, photo file loader) into one Preprocessor and
    wraps it in a Generator. Pipeline registration order matters: each
    add_operation/set_consumer applies to the most recent with_pipeline.
    """
    images_info_loader = l.CSVLoader('data/images_train.csv',
                                     'data/images_test.csv')
    preprocessor = l.Preprocessor()
    # numeric per-photo stats: log-scaled sharpness, converted, standardized
    preprocessor.with_pipeline('photo_stats').set_loader(
        images_info_loader.select_loader(['width', 'height', 'sharpness']))
    preprocessor.add_operation(l.LogTransform(['sharpness'
                                               ])).add_operation(l.ToNdarray())
    preprocessor.add_operation(preprocessing.StandardScaler())
    # listing ids (train only), merged with interest level via left join
    preprocessor.with_pipeline('images_origin').set_loader(
        images_info_loader.select_loader(['listing_id']), only_train=True)
    merger = l.PandasColumnMerger(['images_origin', 'interest_level'],
                                  on='listing_id',
                                  how='left')
    preprocessor.set_consumer(merger)
    # interest labels from JSON (train only), fed to the same merger
    preprocessor.with_pipeline('interest_level').set_loader(
        l.JSONLoader().select_loader(['listing_id', 'interest_level']),
        only_train=True)
    preprocessor.set_consumer(merger)
    # one-hot response matrix with fixed high/medium/low column order
    preprocessor.with_pipeline('response').set_loader(
        merger, only_train=True).add_operation(l.Selector('interest_level'))
    preprocessor.add_operation(
        l.Dummifier(output_cols=['high', 'medium', 'low'])).add_operation(
            l.ToNdarray())
    # photo pipeline streams image files keyed by (listing_id, photo_name)
    preprocessor.with_pipeline('photo').set_loader(
        images_info_loader.select_loader(['listing_id', 'photo_name']))
    generator = Generator(preprocessor, {'photo': l.PhotoLoaderGenerator()},
                          l.SeparateKey('response'))
    return generator
Пример #6
0
 def __init__(self, src_vocab_size, trg_vocab_size, embed_size, hidden_size,
              dropout_p, src_emb_vectors, trg_emb_vectors):
     """Assemble the encoder, decoder, and output generator of the Seq2Seq model.

     Args:
         src_vocab_size: source-language vocabulary size.
         trg_vocab_size: target-language vocabulary size.
         embed_size: embedding dimensionality shared by encoder and decoder.
         hidden_size: recurrent hidden-state size.
         dropout_p: dropout probability passed to all submodules.
         src_emb_vectors: pretrained source embeddings handed to the encoder.
         trg_emb_vectors: pretrained target embeddings handed to the decoder.
     """
     super(Seq2Seq, self).__init__()
     self.encoder = Encoder(src_vocab_size, embed_size, hidden_size,
                            dropout_p, src_emb_vectors)
     self.decoder = Decoder(trg_vocab_size, embed_size, hidden_size,
                            dropout_p, trg_emb_vectors)
     # projects decoder states to target-vocabulary scores
     self.generator = Generator(trg_vocab_size, hidden_size, dropout_p)
Пример #7
0
	def draw(self, ax, time=1.0, **kwargs):
		"""Draw the amplifier's output followed by its input signal.

		Both curves span *time* seconds; the input trace is drawn with
		its alpha halved (defaulting to 0.5) so the output stays
		visually dominant.
		"""
		# output first, at full strength
		Generator.draw(self, ax, time, **kwargs)

		# input second, at half the opacity (base alpha defaults to 1.0)
		kwargs['alpha'] = kwargs.get('alpha', 1.0) * 0.5
		self.input.draw(ax, time, **kwargs)
Пример #8
0
class GeneratorTest(unittest.TestCase):
    """Unit tests for Generator's XML output and RFC 2822 date formatting."""

    # built once when the class body executes; shared by all test methods
    generator = Generator(TestSiteParser())

    def test_write_xml(self):
        # smoke test: write_xml must produce a non-None value
        out = StringIO.StringIO()  # Python 2 StringIO module
        self.generator.write_xml(out)
        self.assertIsNotNone(out.getvalue())

    def test_rfc2822time_generator(self):
        # NOTE(review): datetime.fromtimestamp() converts using the local
        # timezone, yet the expected string is pinned to -0000 — this
        # presumably only passes on UTC machines; confirm rfc2822time's
        # timezone handling.
        dt = datetime.fromtimestamp(1505237983)
        self.assertEqual("Tue, 12 Sep 2017 17:39:43 -0000", rfc2822time(dt))
Пример #9
0
    def draw(self, ax, time=1.0, **kwargs):
        """Draw the mixer's output signal followed by each input's signal.

        Each successive input is drawn with a cumulatively halved alpha and
        a scale multiplied by its mixing level.
        """
        #draw the output signal
        Generator.draw(self, ax, time, **kwargs)

        #draw inputs' output signals
        for i in range(len(self.inputs)):
            # NOTE(review): kwargs is mutated across iterations, so alpha
            # and scale compound: input i is drawn with alpha * 0.5**(i+1)
            # and scale * levels[0] * ... * levels[i]. Verify the cumulative
            # scale product is intended rather than scale * levels[i].
            try:
                kwargs['alpha'] *= 0.5
            except KeyError:
                kwargs['alpha'] = 0.5

            try:
                kwargs['scale'] *= self.levels[i]
            except KeyError:
                kwargs['scale'] = self.levels[i]

            self.inputs[i].draw(ax, time, **kwargs)
Пример #10
0
def circulartrack_lookahead(start,
                            alpha,
                            tau,
                            sigma=1,
                            n_samp=n_samp,
                            n_step=n_step):
    """Sample look-ahead trajectories on a goal-free circular track.

    Returns:
        (explorer, propagator, generator, environment) tuple.
    """
    env = CircularTrack(n_state=n_state_circ, start=start, goal_no=None)
    gen = Generator(ENV=env, symmetrize=symmetrize, jump_rate=jump_rate)
    prop = Propagator(GEN=gen, sigma=sigma, tau=tau, alpha=alpha)
    explorer = Explorer(PROP=prop, rho_init=start, no_dwell=no_dwell)
    explorer.sample_sequences(n_samp=n_samp, n_step=n_step)
    # visualization attributes used by downstream plotting code
    explorer.traj_width = 0
    explorer.start_pos = True
    explorer.state_msize = state_msize
    return explorer, prop, gen, env
Пример #11
0
    def generate(self, original_name, input_path, output_dir, options):
        """Composite an animated overlay GIF onto a static emoji image.

        Args:
            original_name: source file name, used to derive the emoji name.
            input_path: path of the emoji image to load.
            output_dir: directory the resulting GIF is written into.
            options: per-call options merged over self.defaults; must hold
                an "overlay" key naming one of ALLOWED_OVERLAYS.

        Returns:
            Tuple of (write_gif result, '<overlay>_<original_name>' label).

        Raises:
            ValueError: if the requested overlay is not in ALLOWED_OVERLAYS.
        """
        options = {**self.defaults, **options}
        overlay_file = options["overlay"]

        if overlay_file not in OverlayGenerator.ALLOWED_OVERLAYS:
            raise ValueError("Unknown overlay " + overlay_file)
        overlay_name = OverlayGenerator.ALLOWED_OVERLAYS[overlay_file]

        overlay = Image.open(f"resources/{overlay_file}")
        emoji = self.load_image(input_path)
        emoji = emoji[0]  # load_image returns a sequence; use the first frame
        emoji_name = Generator.get_emoji_name_from_file(original_name)

        frames = []
        emoji_w, emoji_h = emoji.size
        palette = None
        for i, overlay_frame in enumerate(ImageSequence.Iterator(overlay)):
            canvas = Image.new("RGBA", emoji.size, (255, 255, 255))

            # reuse the first frame's palette so later frames keep its colors
            if palette is None:
                palette = overlay_frame.getpalette()
            else:
                overlay_frame.putpalette(palette)

            # overlay_frame.save(f'../output/{overlay_name}.{i:02}.gif', 'GIF')
            # cropped_frame = fire_frame.crop((0, 0, emoji_w, emoji_h))
            overlay_frame.thumbnail(canvas.size)
            overlay_frame = overlay_frame.convert('RGBA')
            canvas.paste(emoji, (0, 0), mask=emoji)

            # center the overlay; the 'burning' variant hugs the bottom edge
            offset = ((canvas.width - overlay_frame.width) // 2,
                      (canvas.height - overlay_frame.height) // 2)
            if overlay_name == 'burning':
                offset = (0, emoji_h - overlay_frame.height + 5)

            canvas.paste(overlay_frame, offset, mask=overlay_frame)
            frames.append(canvas)

        return self.write_gif(frames, output_dir, emoji_name + ".gif",
                              options), f'{overlay_name}_{original_name}'
Пример #12
0
# SETTINGS - SIMULATOR/LEARNER/EXPLORER
no_dwell = False
n_samp = 1

# VISUALIZATION
state_msize = 5  # marker size for plotted states

# %% eigenvectors panels
ENV = RoomWorld(start=0, scale=scale_highres)
# eigenvector indices to display, one column each
kernels = [
    3, 8, 23, 35, 9 * 8, 9 * 12 + 1, 16 * 12, 16 * 12 + 1, 17 * 12 + 1,
    19 * 12 - 4
]
ncols = len(kernels)
GEN = Generator(ENV=ENV,
                forward=forward,
                symmetrize=symmetrize,
                jump_rate=jump_rate)

# one panel per selected eigenvector, laid out in a single row
GEN.plot_real_eigenvectors(kernels=kernels,
                           wrap_col=ncols,
                           title=False,
                           norm_scale=norm_scale)
if save_output:
    GEN.fig.savefig(
        os.path.join(figdir, fname_base + "_evecs.png"),
        dpi=300,
        bbox_inches="tight",
        pad_inches=0.1,
    )

# %% tau x t and sample panels
Пример #13
0
labelpad = 10
env_lw = 2.  # environment outline line width
jitter_state = False
vmax_prop = 0.05

# SAMPLING
ENV = OpenBox(scale=scale)
ENV.env_lw = env_lw
state_away_default = 26
states_away = [26, 48, 598, 576]
n_states_away = len(states_away)  # corners
state_home = 335  # slightly off center as in Pfeiffer2013
states_home = [state_home]
# NOTE(review): immediately overwrites the single-element list above —
# the previous assignment is dead; confirm which home set is intended
states_home = [310, 311, 336, 335]

GEN = Generator(ENV=ENV, symmetrize=symmetrize, jump_rate=jump_rate)
# second generator with the home states up-weighted
GENh = Generator(ENV=ENV, symmetrize=symmetrize, jump_rate=jump_rate)
GENh.highlight_states(states=states_home, weight=goal_weight)

# diffusion vs superdiffusion propagators, both on the highlighted generator
PROPd = Propagator(GEN=GENh, tau=tau_diff, alpha=alpha_diff)
PROPs = Propagator(GEN=GENh, tau=tau_sdiff, alpha=alpha_sdiff)

# multi-start away explorers
EXPds = []
EXPss = []
PROPds = []
PROPss = []
for start in states_away:
    EXPd = Explorer(PROP=PROPd,
                    rho_init=start,
                    no_dwell=no_dwell,
Пример #14
0
jitter_std = 0.03
traj_width = 0.

# %% SIMULATIONS
n_x = 100  # resolution of the dense x-axis evaluation grid
alphas = [alpha_shift, alpha_base]
taus = [tau_shift, tau_base]
# alpha_tau_combos = list(product(alphas,taus))
alpha_tau_combos = [(alpha_base, tau_base), (alpha_base, tau_shift),
                    (alpha_shift, tau_shift)]
n_alpha = len(alphas)
n_tau = len(taus)

ENV = LinearTrack(scale=n_state, goal=goal)
# NOTE(review): GEN is built on a *fresh* LinearTrack instance, not the ENV
# created just above — confirm this is intentional
GEN = Generator(ENV=LinearTrack(scale=n_state, goal=goal),
                forward=forward,
                symmetrize=symmetrize,
                jump_rate=jump_rate)

evals = GEN.evals
x_state = np.linspace(0, 1, n_state)  # state-center positions on [0, 1]
x = np.linspace(0, 1, n_x)  # dense evaluation positions
comps = range(1, n_state + 1)  # spectral component indices (1-based)
comps_rev = [c for c in reversed(comps)]
# generate pandas dataaset
iterables_prop = [alphas, taus, x]
iterables_eval = [alphas, taus, comps]
iterables_comp = [comps, x]
ix_prop = pd.MultiIndex.from_product(iterables_prop,
                                     names=['alpha', 'tau', 'x'])
ix_eval = pd.MultiIndex.from_product(iterables_eval,
                                     names=['alpha', 'tau', 'comp'])
Пример #15
0
                        help='number of samples to generate (default: 10000)')
    parser.add_argument('--no-cuda',
                        action='store_true',
                        default=False,
                        help='Disables GPU use')
    parser.add_argument('--out-path',
                        type=str,
                        default='./',
                        metavar='Path',
                        help='Path to output samples')
    args = parser.parse_args()
    args.cuda = True if not args.no_cuda and torch.cuda.is_available(
    ) else False

    if args.cp_path is None:
        raise ValueError(
            'There is no checkpoint/model path. Use arg --cp-path to indicate the path!'
        )

    model = Generator(100, [1024, 512, 256, 128], 3)

    ckpt = torch.load(args.cp_path, map_location=lambda storage, loc: storage)
    model.load_state_dict(ckpt['model_state'])

    if args.cuda:
        model = model.cuda()

    print('Cuda Mode is: {}'.format(args.cuda))

    save_samples_no_grid(model, args.nsamples, args.cuda, args.out_path)
# second whitespace-separated token of every question
secondWord = df1['question_text'].str.split().str[1]
#secondWordFalse2 = dfFalse['question_text'].str.split().str[1]

# ten most frequent second words
TrueCount2 = secondWord.value_counts().index.tolist()[:10]
#FalseCount2 = secondWordFalse2.value_counts().index.tolist()[:10]

# all "first-word second-word" bigrams from the two top-10 lists
TrueCountNGram = []
for x in itertools.product(TrueCount, TrueCount2):
    k = ' '.join(x)
    TrueCountNGram.append(k)

numOfWords = 2000
tokenizerPos = Tokenizer(num_words=numOfWords)  # NOTE(review): unused below — confirm

gen = Generator(df1, TrueCount, TrueCountNGram, stop)
#genT = Generator(dfV)
X_input = gen.generate(6).__next__()  # NOTE(review): result unused — confirm
epochs = 5

model = lstm_model()

model.fit_generator(gen.generate(200),
                    epochs=epochs,
                    verbose=1,
                    steps_per_epoch=500)

# persist the model architecture as YAML
model_yaml = model.to_yaml()
with open("model.yaml", "w") as yaml_file:
    yaml_file.write(model_yaml)
# serialize weights to HDF5
Пример #17
0
def data_preparation(jzdbtest_session, apply_prefix, apply_suffix, apply_code,
                     user_id, organ_id, net_id):
    """Seed the test database with one synthetic customer application.

    Creates serial numbers, a random customer with register/apply records,
    ID card, check files, basic info, optional policy data (product '1007'
    only), credit (PBOC) records, review results, and intermediary
    agreements, committing them in several transactions on
    *jzdbtest_session*.

    Returns:
        (product_code, customer): the randomly chosen product code ('1006'
        or '1007') and the generated customer object.
    """
    # Create apply id and register the id
    apply_sn = SerialNumber(insert_time=datetime.now(),
                            update_time=datetime.now(),
                            operator_id=0,
                            delete_flag=0,
                            type=2,
                            prefix=apply_prefix,
                            suffix=apply_suffix,
                            number=apply_code)
    register_prefix, register_suffix, register_code = DataPreparation.create_serial_number(
        'RE', jzdbtest_session)
    reg_sn = SerialNumber(insert_time=datetime.now(),
                          update_time=datetime.now(),
                          operator_id=0,
                          delete_flag=0,
                          type=1,
                          prefix=register_prefix,
                          suffix=register_suffix,
                          number=register_code)

    # Create apply
    product_code = ['1006', '1007'][random.randint(0, 1)]  # random product pick
    customer = Generator.generate_customer()
    cr = DataPreparation.customer_register(register_code, customer,
                                           product_code, user_id, organ_id,
                                           net_id)
    apply = DataPreparation.customer_apply(apply_code, register_code, customer,
                                           product_code, user_id, organ_id,
                                           net_id)

    # Create user id information
    id_card = DataPreparation.customer_id_card(apply_code, customer)

    # Check files
    check_file_1 = DataPreparation.customer_check_file(apply_code, 1)
    check_file_2 = DataPreparation.customer_check_file(apply_code, 2)
    check_file_3 = DataPreparation.customer_check_file(apply_code, 3)

    # Customer basic information
    basic_info = DataPreparation.customer_application_info_pre(
        apply_code, customer)

    # Customer policy information, if product id is 1007
    if product_code == '1007':
        policy = DataPreparation.customer_policy(apply_code)
        with jzdbtest_session.begin():
            jzdbtest_session.add(policy)
            jzdbtest_session.add(
                DataPreparation.customer_policy_photo(apply_code, policy.id))

    with jzdbtest_session.begin():
        jzdbtest_session.add_all([
            apply_sn, reg_sn, cr, apply, id_card, check_file_1, check_file_2,
            check_file_3, basic_info
        ])

    # Create credits
    pboc1, pboc2 = DataPreparation.api_pboc(apply_code, jzdbtest_session)
    file_result = DataPreparation.customer_check_file_result(apply_code)

    # Customer information review
    apply_confirm_result = DataPreparation.customer_applyconfirm_result(
        apply_code)

    # Entire customer information
    application_info = DataPreparation.customer_application_info(apply_code)

    # Intermedia agreements
    agreement2, agreement3, agreement4 = DataPreparation.customer_intermediary_agreement_file(
        apply_code)

    with jzdbtest_session.begin():
        jzdbtest_session.add_all([
            pboc1, pboc2, file_result, apply_confirm_result, application_info,
            agreement2, agreement3, agreement4
        ])

    # Intermedia agreements review
    media_file_result = DataPreparation.customer_intermediary_agreement_result(
        apply_code)

    with jzdbtest_session.begin():
        jzdbtest_session.add(media_file_result)

    return product_code, customer
Пример #18
0
        for i in range(int(record_size)):
            logger.info('To create data #{}'.format(str(i + 1)))
            apply_prefix, apply_suffix, apply_code = DataPreparation.create_serial_number(
                'AP', jzdbtest_session)
            product_code, customer = data_preparation(jzdbtest_session,
                                                      apply_prefix,
                                                      apply_suffix, apply_code,
                                                      user_id, organ_id,
                                                      net_id)
            telephone_verification_preparation(product_code, apply_code,
                                               jzdbtest_session)
            logger.info('To create workflow #{}'.format(str(i + 1)))
            workflow = WorkflowTrigger(apply_code, user_id=user_id)
            branch_workflow(workflow)
            if xarg == 'interview':
                file_audit_workflow(workflow, file_auditor)
                solution_confirm_workflow(workflow)
                intermedia_file_audit(workflow, agreement_auditor)
                launch_phone_verification_workflow(workflow, product_code)
                launch_branch_registration_workflow(workflow, customer.name,
                                                    customer.id_number)
        print('{} data have been imported.'.format(str(record_size)))
        sso_session.close()
        jzdbtest_session.close()
    else:
        # for export simulated data
        with open(xarg + '.csv', 'w') as csv:
            for _ in range(int(record_size)):
                customer = Generator.generate_customer()
                csv.write(str(customer) + '\n')
Пример #19
0
# training hyper-parameters for the GAN script below
total_epoch = 100
batch_size = 100
learning_rate = 0.0002

n_input = 28 * 28  # flattened image size (28x28 — MNIST-sized; TODO confirm dataset)
n_noise = 128  # latent (noise) vector length


def get_noise(batch_size, n_noise):
    """Sample a (batch_size, n_noise) latent matrix from a standard normal."""
    shape = (batch_size, n_noise)
    return np.random.normal(size=shape)


# graph inputs: real samples X and latent noise Z
X = tf.placeholder(tf.float32, [None, n_input])
Z = tf.placeholder(tf.float32, [None, n_noise])

G = Generator(n_input, n_noise)
G_out = G.net(Z)  # generated (fake) samples

D = Discriminator(n_input, n_noise)
D_gene = D.net(G_out, reuse=False)  # discriminator output on fakes
D_real = D.net(X, reuse=True)  # same weights evaluated on real data

# NOTE(review): GAN objectives written as quantities to maximize (no leading
# minus) — confirm the training step negates them. D_real/D_gene are
# presumably in (0, 1) for tf.log to be finite; verify D.net's output.
loss_D = tf.reduce_mean(tf.log(D_real) + tf.log(1 - D_gene))
loss_G = tf.reduce_mean(tf.log(D_gene))

# collect per-network variables by variable-scope name for separate updates
D_var_list = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                               scope='discriminator')
G_var_list = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                               scope='generator')
#print('G_var_list:', len(G_var_list))
#print('D_var_list:', len(D_var_list))
Пример #20
0
    ti = ti.reshape((ti.shape[0], ti.shape[1], 1))
ti = ti.transpose((2, 0, 1))  # HWC -> CHW channel ordering
ti = ti / 128. - 1.0  # map 8-bit pixel values into roughly [-1, 1)

#%% Check generated models for a given epoch
DoFiltering = False
DoThreshold = True
TriCat = True
epoch = 299  # which saved generator checkpoint to load

gpath = work_dir + '/netG_epoch_' + str(epoch) + '.pth'

cuda = False

from generators import G as Generator
netG = Generator(nc=1, nz=3, ngf=64, gfs=3, ngpu=1, cuda=cuda, gpath=gpath)
netG.eval()  # inference mode

#rn_seed=2043
#np.random.seed(rn_seed)

# latent input: 1 sample, nz channels, zx-by-zy spatial noise in [-1, 1)
nz = 3
zx = 5
zy = 3
znp = np.random.uniform(0, 1, (1, nz, zx, zy)) * 2 - 1

z = torch.from_numpy(znp).float()

t0 = time.time()  # time the generation below
if cuda:
class InternalCommands:
	"""Implementation of the `hm` build-helper commands (conf/build/clean/...).

	Holds tool command names, generated-file names, and per-platform CMake
	generator tables used by the command methods defined on this class.
	"""

	project = 'synergy'
	setup_version = 5 # increment to force setup/config
	website_url = 'http://synergy-foss.org/'

	# external tool command names
	this_cmd = 'hm'
	cmake_cmd = 'cmake'
	qmake_cmd = 'qmake'
	make_cmd = 'make'
	xcodebuild_cmd = 'xcodebuild'
	w32_make_cmd = 'mingw32-make'
	w32_qt_version = '4.6.2'
	defaultTarget = 'release'

	cmake_dir = 'res'
	gui_dir = 'src/gui'
	doc_dir = 'doc'

	# derived file/dir names for the generated project files
	sln_filename = '%s.sln' % project
	xcodeproj_filename = '%s.xcodeproj' % project
	configDir = 'build'
	configFilename = '%s/%s.cfg' % (configDir, this_cmd)
	qtpro_filename = 'gui.pro'
	doxygen_filename = 'doxygen.cfg'

	cmake_url = 'http://www.cmake.org/cmake/resources/software.html'

	# try_chdir(...) and restore_chdir() will use this
	prevdir = ''

	# by default, no index specified as arg
	generator_id = None

	# by default, prompt user for input
	no_prompts = False

	# by default, don't compile the gui
	enable_make_gui = False

	# per-platform CMake generator choices, keyed by user-selectable index
	win32_generators = {
		1 : Generator('Visual Studio 10'),
		2 : Generator('Visual Studio 10 Win64'),
		3 : Generator('Visual Studio 9 2008'),
		4 : Generator('Visual Studio 9 2008 Win64'),
		5 : Generator('Visual Studio 8 2005'),
		6 : Generator('Visual Studio 8 2005 Win64')
	}

	unix_generators = {
		1 : MakefilesGenerator(),
		2 : EclipseGenerator(),
	}

	darwin_generators = {
		1 : MakefilesGenerator(),
		2 : Generator('Xcode'),
		3 : EclipseGenerator(),
	}

	def getBuildDir(self, target=''):
		"""Delegate to the active generator's build-directory lookup."""
		generator = self.getGenerator()
		return generator.getBuildDir(target)

	def getBinDir(self, target=''):
		"""Delegate to the active generator's binary-directory lookup."""
		generator = self.getGenerator()
		return generator.getBinDir(target)

	def sln_filepath(self):
		"""Return the Windows path of the generated Visual Studio solution."""
		build_dir = self.getBuildDir()
		return '%s\%s' % (build_dir, self.sln_filename)

	def xcodeproj_filepath(self, target=''):
		"""Return the path of the generated Xcode project for *target*."""
		build_dir = self.getBuildDir(target)
		return '%s/%s' % (build_dir, self.xcodeproj_filename)
		
	def usage(self):
		"""Print command-line usage for the hm build script (Python 2 print)."""
		app = sys.argv[0]
		print ('Usage: %s <command> [-g <index>|-v|--no-prompts|<command-options>]\n'
			'\n'
			'Replace [command] with one of:\n'
			'  about       Show information about this script\n'
			'  setup       Runs the initial setup for this script\n'
			'  conf        Runs cmake (generates project files)\n'
			'  open        Attempts to open the generated project file\n'
			'  build       Builds using the platform build chain\n'
			'  clean       Cleans using the platform build chain\n'
			'  kill        Kills all synergy processes (run as admin)\n'
			'  update      Updates the source code from repository\n'
			'  revision    Display the current source code revision\n'
			'  package     Create a distribution package (e.g. tar.gz)\n'
			'  install     Installs the program\n'
			'  doxygen     Builds doxygen documentation\n'
			'  reformat    Reformat .cpp and .h files using AStyle\n'
			'  usage       Shows the help screen\n'
			'\n'
			'Example: %s build -g 3'
			) % (app, app)

	def configureAll(self, targets):
		"""Configure every requested target, defaulting to self.defaultTarget."""
		# an empty list means "use the default target" (list is mutated)
		if not targets:
			targets += [self.defaultTarget,]

		for requested in targets:
			self.configure(requested)

	def configure(self, target='', extraArgs=''):
		
		cmake_args = ''

		# ensure latest setup and do not ask config for generator (only fall 
		# back to prompt if not specified as arg)
		self.ensure_setup_latest()
		
		# ensure that we have access to cmake
		_cmake_cmd = self.persist_cmake()

		# now that we know we've got the latest setup, we can ask the config
		# file for the generator (but again, we only fall back to this if not 
		# specified as arg).
		generator = self.getGenerator()
		
		if generator != self.findGeneratorFromConfig():
		        print('Generator changed, running setup.')
			self.setup(target)

		if generator.cmakeName != '':
			cmake_args += ' -G "' + generator.cmakeName + '"'
		
		# default is release
		if target == '':
			print 'Defaulting target to: ' + self.defaultTarget
			target = self.defaultTarget

		# for makefiles always specify a build type (debug, release, etc)
		if generator.cmakeName.find('Unix Makefiles') != -1:
			cmake_args += ' -DCMAKE_BUILD_TYPE=' + target.capitalize()
		
		# if not visual studio, use parent dir
		sourceDir = generator.getSourceDir()
		
		if extraArgs != '':
			cmake_args += ' ' + extraArgs

		cmake_cmd_string = _cmake_cmd + cmake_args + ' ' + sourceDir
		
		# Run from build dir so we have an out-of-source build.
		self.try_chdir(self.getBuildDir(target))

		print "CMake command: " + cmake_cmd_string
		err = os.system(cmake_cmd_string)

		self.restore_chdir()

		if generator.cmakeName.find('Eclipse') != -1:
			self.fixCmakeEclipseBug()

		if err != 0:
			raise Exception('CMake encountered error: ' + str(err))
		
		# allow user to skip qui compile
		if self.enable_make_gui:
			
			# make sure we have qmake
			self.persist_qmake()
			
			qmake_cmd_string = self.qmake_cmd + " " + self.qtpro_filename + " -r"

			if sys.platform == "darwin":
				# create makefiles on mac (not xcode).
				qmake_cmd_string += " -spec macx-g++"
				
				(major, minor) = self.getMacVersion()
				if major == 10 and minor <= 4:
					# 10.4: universal (intel and power pc)
					qmake_cmd_string += ' CONFIG+="ppc i386"'

			print "QMake command: " + qmake_cmd_string
			
			# run qmake from the gui dir
			self.try_chdir(self.gui_dir)
			err = os.system(qmake_cmd_string)
			self.restore_chdir()
			
			if err != 0:
				raise Exception('QMake encountered error: ' + str(err))
		
		self.setConfRun(target)

	# http://tinyurl.com/cs2rxxb
	def fixCmakeEclipseBug(self):
		"""Strip the <linkedResources> block CMake writes into .project.

		Works around a CMake/Eclipse generator bug (link above the method).
		"""
		print "Fixing CMake Eclipse bugs..."

		# NOTE(review): the pattern is not a raw string; '\s' happens to work
		# but triggers DeprecationWarning on Python 3 — consider r'...'
		file = open('.project', 'r+')
		content = file.read()
		pattern = re.compile('\s+<linkedResources>.+</linkedResources>', re.S)
		content = pattern.sub('', content)
		file.seek(0)
		file.write(content)
		file.truncate()
		file.close()

	def persist_cmake(self):
		"""Check that cmake is runnable; return its command name or raise."""
		# even though we're running `cmake --version`, we're only doing this for the 0 return
		# code; we don't care about the version, since CMakeLists worrys about this for us.
		err = os.system('%s --version' % self.cmake_cmd)
		
		if err != 0:
			# if return code from cmake is not 0, then either something has
			# gone terribly wrong with --version, or it genuinely doesn't exist.
			print ('Could not find `%s` in system path.\n'
			       'Download the latest version from:\n  %s') % (
				self.cmake_cmd, self.cmake_url)
			raise Exception('Cannot continue without CMake.')
		else:
			return self.cmake_cmd

	def persist_qt(self):
		"""Alias kept for naming symmetry; checks qmake availability."""
		self.persist_qmake()

	def persist_qmake(self):
		"""Verify qmake is runnable; on win32 warn about unsupported Qt.

		Silently returns on Python < 2.4 (no usable subprocess module).
		Raises if qmake cannot be executed or its version is unparsable.
		"""
		# cannot use subprocess on < python 2.4
		if sys.version_info < (2, 4):
			return
		
		try:
			p = subprocess.Popen(
				[self.qmake_cmd, '--version'], 
				stdout=subprocess.PIPE, 
				stderr=subprocess.PIPE)
		except:
			print >> sys.stderr, 'Error: Could not find qmake.'
			if sys.platform == 'win32': # windows devs usually need hints ;)
				print (
					'Suggestions:\n'
					'1. Ensure that qmake.exe exists in your system path.\n'
					'2. Try to download Qt (check our dev FAQ for links):\n'
					'  qt-sdk-win-opensource-2010.02.exe')
			raise Exception('Cannot continue without qmake.')
		
		stdout, stderr = p.communicate()
		if p.returncode != 0:
			# NOTE(review): message says "cmake" but this runs qmake
			raise Exception('Could not test for cmake: %s' % stderr)
		else:
			m = re.search('.*Using Qt version (\d+\.\d+\.\d+).*', stdout)
			if m:
				if sys.platform == 'win32':
					ver = m.group(1)
					if ver != self.w32_qt_version: # TODO: test properly
						print >> sys.stderr, (
							'Warning: Not using supported Qt version %s'
							' (your version is %s).'
							) % (self.w32_qt_version, ver)
				else:
					pass # any version should be ok for other platforms
			else:
				raise Exception('Could not find qmake version.')

	def ensureConfHasRun(self, target, skipConfig):
		"""Run configure for *target* unless it (or the caller) already did."""
		if self.hasConfRun(target):
			print 'Skipping config for target: ' + target
			skipConfig = True

		if not skipConfig:
			self.configure(target)

	def build(self, targets=None, skipConfig=False):
		"""Build the given targets using the platform build chain.

		BUGFIX: `targets=[]` was a shared mutable default the body appended
		to; a None sentinel gives identical observable behavior without the
		cross-call mutation.
		"""
		# if no mode specified, use default
		if targets is None:
			targets = []
		if len(targets) == 0:
			targets += [self.defaultTarget,]

		self.ensure_setup_latest()

		generator = self.getGeneratorFromConfig().cmakeName

		if generator.find('Unix Makefiles') != -1:
			# makefile generators configure per target (build type baked in)
			for target in targets:
				self.ensureConfHasRun(target, skipConfig)
				self.runBuildCommand(self.make_cmd, target)
		else:
			# IDE generators configure once, then build each configuration
			self.ensureConfHasRun('all', skipConfig)
			for target in targets:
				if generator.startswith('Visual Studio'):
					self.run_vcbuild(generator, target)
				elif generator == 'Xcode':
					cmd = self.xcodebuild_cmd + ' -configuration ' + target.capitalize()
					self.runBuildCommand(cmd, target)
				else:
					raise Exception('Build command not supported with generator: ' + generator)

		# allow user to skip qui compile
		if self.enable_make_gui:
			self.make_gui(targets)
	
	def runBuildCommand(self, cmd, target):
		"""Run *cmd* from the target's build directory, raising on failure."""
		self.try_chdir(self.getBuildDir(target))
		status = os.system(cmd)
		self.restore_chdir()

		if status != 0:
			raise Exception(cmd + ' failed: ' + str(status))
	
	def clean(self, targets=[]):
		
		# if no mode specified, use default
		if len(targets) == 0:
			targets += [self.defaultTarget,]
		
		generator = self.getGeneratorFromConfig().cmakeName

		if generator.startswith('Visual Studio'):
			# special case for version 10, use new /target:clean
			if generator.startswith('Visual Studio 10'):
				for target in targets:
					self.run_vcbuild(generator, target, '/target:clean')
				
			# any other version of visual studio, use /clean
			elif generator.startswith('Visual Studio'):
				for target in targets:
					self.run_vcbuild(generator, target, '/clean')

		else:
			cmd = ''
			if generator == "Unix Makefiles":
				print 'Cleaning with GNU Make...'
				cmd = self.make_cmd
			elif generator == 'Xcode':
				print 'Cleaning with Xcode...'
				cmd = self.xcodebuild_cmd
			else:
				raise Exception('Not supported with generator: ' + generator)

			for target in targets:
				self.try_chdir(self.getBuildDir(target))
				err = os.system(cmd + ' clean')
				self.restore_chdir()

				if err != 0:
					raise Exception('Clean failed: ' + str(err))

		# allow user to skip qui compile
		clean_targets = []
		if self.enable_make_gui:
			for target in targets:
				clean_targets.append(target + '-clean')
			
			self.make_gui(clean_targets)
	
	def make_gui(self, targets):
		"""Build (or clean) the Qt GUI for each target with the platform make tool."""
		if sys.platform == 'win32':
			gui_make_cmd = self.w32_make_cmd
		elif sys.platform in ['linux2', 'sunos5', 'freebsd7', 'darwin']:
			gui_make_cmd = self.make_cmd + " -w"
		else:
			raise Exception('Unsupported platform: ' + sys.platform)

		print 'Make GUI command: ' + gui_make_cmd

		if sys.platform == 'win32':
			# on windows the target name is passed to the make command
			for target in targets:
				self.try_chdir(self.gui_dir)
				err = os.system(gui_make_cmd + ' ' + target)
				self.restore_chdir()

				if err != 0:
					raise Exception(gui_make_cmd + ' failed with error: ' + str(err))
		else:
			self.try_chdir(self.gui_dir)
			err = os.system(gui_make_cmd)
			self.restore_chdir()

			if err != 0:
				raise Exception(gui_make_cmd + ' failed with error: ' + str(err))

			if sys.platform == 'darwin':
				# copy the client/server binaries into the .app bundle
				self.macPostMakeGui()
	
	def macPostMakeGui(self):
		"""Copy the synergyc/synergys binaries into the Mac app bundle.

		The gui looks for the binaries in the current app dir, so they must
		live inside Synergy.app after a build.
		"""
		binDir = self.getGenerator().binDir
		bundleBinDir = binDir + "/Synergy.app/Contents/MacOS/"

		for binary in ("synergyc", "synergys"):
			shutil.copy(binDir + "/" + binary, bundleBinDir)

	def open(self):
		"""Open the generated IDE project (VS solution or Xcode project)."""
		generator = self.getGeneratorFromConfig().cmakeName
		if generator.startswith('Visual Studio'):
			print 'Opening with %s...' % generator
			self.open_internal(self.sln_filepath())

		elif generator.startswith('Xcode'):
			print 'Opening with %s...' % generator
			# use the `open` command so OS X picks the Xcode app
			self.open_internal(self.xcodeproj_filepath(), 'open')

		else:
			raise Exception('Not supported with generator: ' + generator)
		
	def update(self):
		print "Running Subversion update..."
		err = os.system('svn update')
		if err != 0:
			raise Exception('Could not update from repository with error code code: ' + str(err))
		
	def revision(self):
		"""Print the current Subversion revision number."""
		print self.find_revision()

	def find_revision(self):
		"""Return the Subversion revision number (as a string) from `svn info`."""
		if sys.version_info < (2, 4):
			# fall back to the old commands module on ancient pythons
			import commands
			stdout = commands.getoutput('svn info')
		else:
			p = subprocess.Popen(['svn', 'info'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
			stdout, stderr = p.communicate()

			if p.returncode != 0:
				raise Exception('Could not get revision - svn info failed with code: ' + str(p.returncode))

		m = re.search('.*Revision: (\d+).*', stdout)
		if not m:
			raise Exception('Could not find revision number in svn info output.')

		return m.group(1)
		
	def kill(self):
		"""Force-kill all running synergy* processes (Windows only)."""
		if sys.platform == 'win32':
			return os.system('taskkill /F /FI "IMAGENAME eq synergy*"')
		else:
			raise Exception('Not implemented for platform: ' + sys.platform)
		
	def doxygen(self):
		"""Generate API docs with doxygen (configure first to create the config)."""
		# The conf generates doc/doxygen.cfg from cmake/doxygen.cfg.in
		self.configure(self.defaultTarget, '-DCONF_DOXYGEN:BOOL=TRUE')

		err = os.system('doxygen %s/%s' % (self.doc_dir, self.doxygen_filename))

		if err != 0:
			raise Exception('doxygen failed with error code: ' + str(err))
				
	def dist(self, type, vcRedistDir, qtDir):
		"""Build a release and package it as the requested distribution `type`.

		type is one of: src, rpm, deb, win, mac (see dist_usage).
		vcRedistDir/qtDir are only used for the Windows NSIS installer.
		"""
		# Package is supported by default.
		package_unsupported = False
		unixTarget = self.defaultTarget
		
		if type == '' or type == None:
			raise Exception('No type specified.')

		if type != 'win' and type != 'mac':
			# unix package types go through CPack; enable it at configure time
			self.configure(unixTarget, '-DCONF_CPACK:BOOL=TRUE')

		# make sure we have a release build to package
		self.build([self.defaultTarget], skipConfig=True)

		moveExt = ''

		if type == None:
			# NOTE(review): unreachable - None is rejected above
			self.dist_usage()
			return
			
		elif type == 'src':
			if sys.platform in ['linux2', 'darwin']:
				self.distSrc()
			else:
				package_unsupported = True
			
		elif type == 'rpm':
			if sys.platform == 'linux2':
				self.dist_run('cpack -G RPM', unixTarget)
				moveExt = 'rpm'
			else:
				package_unsupported = True
			
		elif type == 'deb':
			if sys.platform == 'linux2':
				self.dist_run('cpack -G DEB', unixTarget)
				moveExt = 'deb'
			else:
				package_unsupported = True
			
		elif type == 'win':
			if sys.platform == 'win32':
				self.distNsis(vcRedistDir, qtDir)
			else:
				package_unsupported = True
			
		elif type == 'mac':
			if sys.platform == 'darwin':
				self.distMac(unixTarget)
			else:
				package_unsupported = True
			
		else:
			raise Exception('Package type not supported: ' + type)

		if moveExt != '':
			# cpack leaves the package in the release dir; publish it in bin
			self.unixMove(
				self.getGenerator().buildDir + '/release/*.' + moveExt,
				self.getGenerator().binDir)

		if package_unsupported:
			raise Exception(
				("Package type, '%s' is not supported for platform, '%s'") 
				% (type, sys.platform))
		
	def distSrc(self):
		"""Export the svn working copy and package it as a source tarball."""
		version = self.getVersionFromCmake()
		name = (self.project + '-' + version + '-Source')
		exportPath = self.getGenerator().buildDir + '/' + name

		if os.path.exists(exportPath):
			print "Removing existing export..."
			shutil.rmtree(exportPath)

		print 'Exporting repository to: ' + exportPath
		err = os.system('svn export . ' + exportPath)
		if err != 0:
			raise Exception('Repository export failed: ' + str(err))		

		# relative to the build dir we chdir into below
		packagePath = '../' + self.getGenerator().binDir + '/' + name + '.tar.gz'

		try:
			self.try_chdir(self.getGenerator().buildDir)
			print 'Packaging to: ' + packagePath
			err = os.system('tar cfvz ' + packagePath + ' ' + name)
			if err != 0:
				raise Exception('Package failed: ' + str(err))
		finally:
			self.restore_chdir()

	def unixMove(self, source, dest):
		print 'Moving ' + source + ' to ' + dest
		err = os.system('mv ' + source + ' ' + dest)
		if err != 0:
			raise Exception('Package failed: ' + str(err))

	def distMac(self, unixTarget):
		"""Create a Mac .dmg with macdeployqt and rename it with version info."""
		# NOTE(review): unixTarget is unused in this method
		dir = self.getGenerator().binDir

		# use qt to copy libs to bundle so no dependencies are needed,
		# and create dmg for easy download.
		bin = "macdeployqt Synergy.app -dmg -verbose=2"
		self.try_chdir(dir)
		err = os.system(bin)
		self.restore_chdir()

		if err != 0:
			raise Exception(bin + " failed with error: " + str(err))

		fileName = "%s-%s-%s.dmg" % (
			self.project, 
			self.getVersionFromCmake(),
			self.getMacPackageName())

		shutil.move(dir + "/Synergy.dmg", dir + "/" + fileName)

	def distNsis(self, vcRedistDir, qtDir):
		
		if vcRedistDir == '':
			raise Exception(
				'VC++ redist dir path not specified (--vcredist-dir).')

		if qtDir == '':
			raise Exception(
				'QT SDK dir path not specified (--qt-dir).')

		generator = self.getGeneratorFromConfig().cmakeName

		arch = 'x86'
		installDirVar = '$PROGRAMFILES32'

		if generator.endswith('Win64'):
			arch = 'x64'
			installDirVar = '$PROGRAMFILES64'			
		
		templateFile = open(self.cmake_dir + '\Installer.nsi.in')
		template = templateFile.read()

		template = template.replace('${in:version}', self.getVersionFromCmake())
		template = template.replace('${in:arch}', arch)
		template = template.replace('${in:vcRedistDir}', vcRedistDir)
		template = template.replace('${in:qtDir}', qtDir)
		template = template.replace('${in:installDirVar}', installDirVar)

		nsiPath = self.getGenerator().buildDir + '\Installer.nsi'
		nsiFile = open(nsiPath, 'w')
		nsiFile.write(template)
		nsiFile.close()

		command = 'makensis ' + nsiPath
		print 'NSIS command: ' + command
		err = os.system(command)
		if err != 0:
			raise Exception('Package failed: ' + str(err))

	def getVersionFromCmake(self):
		"""Parse VERSION_MAJOR/MINOR/REV out of CMakeLists.txt as 'X.Y.Z'.

		Fixes: the file handle was leaked, and a missing field crashed with
		an unhelpful AttributeError on the None match object.
		"""
		cmakeFile = open('CMakeLists.txt')
		try:
			cmake = cmakeFile.read()
		finally:
			cmakeFile.close()

		parts = []
		for field in ('VERSION_MAJOR', 'VERSION_MINOR', 'VERSION_REV'):
			m = re.search(field + r' (\d+)', cmake)
			if not m:
				raise Exception('Could not find %s in CMakeLists.txt' % field)
			parts.append(m.group(1))

		return '.'.join(parts)

	def distftp(self, type, ftp):
		"""Upload the package for `type` to the FTP server, renamed with the revision."""
		if not type:
			raise Exception('Type not specified.')

		if not ftp:
			raise Exception('FTP info not defined.')

		src = self.dist_name(type)
		dest = self.dist_name_rev(type)
		print 'Uploading %s to FTP server %s...' % (dest, ftp.host)

		srcDir = 'bin/'
		# NOTE(review): `generator` is unused, left over from the
		# commented-out release-dir logic below
		generator = self.getGeneratorFromConfig().cmakeName
		#if not generator.startswith('Visual Studio'):
		#	srcDir += 'release/'

		ftp.run(srcDir + src, dest) 
		print 'Done'
	
	def dist_name(self, type):
		"""Return the package filename in the bin dir for the given dist type.

		Fix: the local variable `platform` shadowed the `platform` module
		imported inside the rpm/deb branch; both are renamed apart.
		"""
		ext = None
		platformName = None

		if type == 'src':
			ext = 'tar.gz'
			platformName = 'Source'

		elif type == 'rpm' or type == 'deb':
			# os_bits should be loaded with '32bit' or '64bit'
			import platform as platformModule
			(os_bits, other) = platformModule.architecture()

			# get platform based on current platform
			ext = type
			if os_bits == '32bit':
				platformName = 'Linux-i686'
			elif os_bits == '64bit':
				platformName = 'Linux-x86_64'

		elif type == 'win':
			# get platform based on last generator used
			ext = 'exe'
			generator = self.getGeneratorFromConfig().cmakeName
			if generator.find('Win64') != -1:
				platformName = 'Windows-x64'
			else:
				platformName = 'Windows-x86'

		elif type == 'mac':
			ext = "dmg"
			platformName = self.getMacPackageName()

		if not platformName:
			raise Exception('Unable to detect package platform.')

		# NOTE(review): \d\.\d\.\d assumes single-digit version components
		pattern = re.escape(self.project + '-') + r'\d\.\d\.\d' + re.escape('-' + platformName + '.' + ext)

		# only use release dir if not windows
		target = ''

		for filename in os.listdir(self.getBinDir(target)):
			if re.search(pattern, filename):
				return filename

		# still here? package probably not created yet.
		raise Exception('Could not find package name with pattern: ' + pattern)
	
	def dist_name_rev(self, type):
		"""Return the dist filename with '-r<revision>' inserted after the version."""
		baseName = self.dist_name(type)
		revision = self.find_revision()
		# splice the revision in right after the X.Y.Z version component
		return re.sub(r'(.*\d+\.\d+\.\d+)(.*)', r'\g<1>-r' + revision + r'\g<2>', baseName)
	
	def dist_run(self, command, target=''):
		"""Run a packaging command (e.g. cpack) inside the target's build dir."""
		self.try_chdir(self.getBuildDir(target))
		print 'CPack command: ' + command
		err = os.system(command)
		self.restore_chdir()
		if err != 0:
			raise Exception('Package failed: ' + str(err))

	def dist_usage(self):
		"""Print usage help for the `package` command.

		Fix: 'paclage' typo in the user-facing text.
		"""
		print ('Usage: %s package [package-type]\n'
			'\n'
			'Replace [package-type] with one of:\n'
			'  src    .tar.gz source (Posix only)\n'
			'  rpm    .rpm package (Red Hat)\n'
			'  deb    .deb package (Debian)\n'
			'  win    .exe installer (Windows)\n'
			'  mac    .dmg package (Mac OS X)\n'
			'\n'
			'Example: %s package src-tgz') % (self.this_cmd, self.this_cmd)

	def about(self):
		"""Print a short about blurb with the project website and help hint."""
		print ('Help Me script, from the Synergy project.\n'
			'%s\n'
			'\n'
			'For help, run: %s help') % (self.website_url, self.this_cmd)

	def try_chdir(self, dir):
		"""chdir into `dir` (creating it if needed), remembering the old cwd.

		The previous directory is kept in the module-global `prevdir` so a
		later restore_chdir() can return. An empty `dir` is a no-op and
		clears `prevdir` so restore_chdir() does nothing.
		"""
		global prevdir

		if dir == '':
			prevdir = ''
			return

		# Ensure temp build dir exists.
		if not os.path.exists(dir):
			print 'Creating dir: ' + dir
			os.makedirs(dir)
 
		prevdir = os.path.abspath(os.curdir)

		# It will exist by this point, so it's safe to chdir.
		print 'Entering dir: ' + dir
		os.chdir(dir)

	def restore_chdir(self):
		"""Return to the directory recorded by the last try_chdir()."""
		global prevdir
		if prevdir == '':
			return
		print 'Going back to: ' + prevdir
		os.chdir(prevdir)

	def open_internal(self, project_filename, application = ''):
		"""Open a project file via the shell, optionally through `application`."""
		if not os.path.exists(project_filename):
			raise Exception('Project file (%s) not found, run hm conf first.' % project_filename)
		else:
			path = project_filename

			if application != '':
				path = application + ' ' + path

			err = os.system(path)
			if err != 0:
				# fix: duplicated word in the error message ("code code")
				raise Exception('Could not open project with error code: ' + str(err))

	def setup(self, target=''):
		"""One-time setup: resolve the generator, persist it, reset conf state."""
		print "Running setup..."

		# a stale CMakeCache.txt from a different generator would poison
		# the next configure, so remove caches when a generator was stored
		oldGenerator = self.findGeneratorFromConfig()
		if not oldGenerator == None:
			for target in ['debug', 'release']:				
				buildDir = oldGenerator.getBuildDir(target)

				cmakeCacheFilename = 'CMakeCache.txt'
				if buildDir != '':
					cmakeCacheFilename = buildDir + '/' + cmakeCacheFilename

				if os.path.exists(cmakeCacheFilename):
					print "Removing %s, since generator changed." % cmakeCacheFilename
					os.remove(cmakeCacheFilename)

		# always either get generator from args, or prompt user when 
		# running setup
		generator = self.get_generator_from_prompt()

		if os.path.exists(self.configFilename):
			config = ConfigParser.ConfigParser()
			config.read(self.configFilename)
		else:
			config = ConfigParser.ConfigParser()

		if not config.has_section('hm'):
			config.add_section('hm')

		if not config.has_section('cmake'):
			config.add_section('cmake')

		config.set('hm', 'setup_version', self.setup_version)

		# store the generator so we don't need to ask again
		config.set('cmake', 'generator', generator)

		self.write_config(config)

		# for all targets, set conf not run
		self.setConfRun('all', False)
		self.setConfRun('debug', False)
		self.setConfRun('release', False)

		print "Setup complete."

	def write_config(self, config, target=''):
		"""Persist the ConfigParser state to the config file.

		Fix: the file handle was never closed, risking lost buffered writes.
		"""
		if not os.path.isdir(self.configDir):
			os.mkdir(self.configDir)
		configfile = open(self.configFilename, 'wb')
		try:
			config.write(configfile)
		finally:
			configfile.close()

	def getGeneratorFromConfig(self):
		"""Return the generator recorded in the config file, or raise.

		Fix: the failure path raised a NameError by referencing an
		undefined variable `name` instead of a useful message.
		"""
		generator = self.findGeneratorFromConfig()
		if generator:
			return generator

		raise Exception('Could not find generator in config; run `hm setup` first.')

	def findGeneratorFromConfig(self):
		"""Return the generator whose cmake name is stored in config, or None."""
		config = ConfigParser.RawConfigParser()
		config.read(self.configFilename)

		if not config.has_section('cmake'):
			return None

		name = config.get('cmake', 'generator')

		# match the stored cmake name against the platform's generator table
		generators = self.get_generators()
		keys = generators.keys()
		keys.sort()
		for k in keys:
			if generators[k].cmakeName == name:
				return generators[k]

		return None

	def min_setup_version(self, version):
		"""True if the setup_version stored in config is at least `version`."""
		if os.path.exists(self.configFilename):
			config = ConfigParser.RawConfigParser()
			config.read(self.configFilename)

			try:
				return config.getint('hm', 'setup_version') >= version
			except Exception:
				# fix: was a bare `except:` which also swallowed
				# SystemExit/KeyboardInterrupt; missing or malformed
				# values simply mean setup is outdated
				return False
		else:
			return False

	def hasConfRun(self, target):
		"""True if configure has been recorded as run for `target`."""
		if self.min_setup_version(2):
			config = ConfigParser.RawConfigParser()
			config.read(self.configFilename)
			try:
				return config.getboolean('hm', 'conf_done_' + target)
			except Exception:
				# fix: was a bare `except:`; a missing option just means
				# configure has not run for this target yet
				return False
		else:
			return False

	def setConfRun(self, target, hasRun=True):
		"""Record in the config whether configure has run for `target`."""
		if self.min_setup_version(3):
			config = ConfigParser.RawConfigParser()
			config.read(self.configFilename)
			config.set('hm', 'conf_done_' + target, hasRun)
			self.write_config(config)
		else:
			raise Exception("User does not have correct setup version.")

	def get_generators(self):
		"""Return the id->generator map for the current platform."""
		if sys.platform == 'win32':
			return self.win32_generators
		elif sys.platform in ['linux2', 'sunos5', 'freebsd7']:
			return self.unix_generators
		elif sys.platform == 'darwin':
			return self.darwin_generators
		else:
			raise Exception('Unsupported platform: ' + sys.platform)
			
	def get_generator_from_prompt(self):
		# NOTE(review): despite the name, this does not prompt; it resolves
		# the generator via getGenerator() (args/config/single option).
		return self.getGenerator().cmakeName

	def getGenerator(self):
		"""Resolve the generator: single option, -g argument, or stored config.

		Fix: the `return conf` line was indented with spaces in this
		tab-indented file (mixed indentation; TabError under python 3).
		"""
		generators = self.get_generators()
		if len(generators.keys()) == 1:
			# only one choice on this platform
			return generators[generators.keys()[0]]

		# if user has specified a generator as an argument
		if self.generator_id:
			return generators[int(self.generator_id)]

		# fall back to whatever a previous `hm setup` recorded
		conf = self.findGeneratorFromConfig()
		if conf:
			return conf

		raise Exception(
			'Generator not specified, use -g arg ' + 
			'(use `hm genlist` for a list of generators).')

	def setup_generator_prompt(self, generators):

		if self.no_prompts:
			raise Exception('User prompting is disabled.')
	
		prompt = 'Enter a number:'
		print prompt,
		
		generator_id = raw_input()
		
		if generator_id in generators:
			print 'Selected generator:', generators[generator_id]
		else:
			print 'Invalid number, try again.'
			self.setup_generator_prompt(generators)

		return generators[generator_id]

	def get_vcvarsall(self, generator):
		"""Locate vcvarsall.bat for the given Visual Studio generator.

		Fixes: bare `except:` narrowed to WindowsError; 'determin' typo in
		the error message; `type` no longer shadows the builtin.
		"""
		import platform, _winreg

		# os_bits should be loaded with '32bit' or '64bit'
		(os_bits, other) = platform.architecture()

		# visual studio is a 32-bit app, so when we're on 64-bit, we need to check the WoW dungeon
		if os_bits == '64bit':
			key_name = r'SOFTWARE\Wow6432Node\Microsoft\VisualStudio\SxS\VS7'
		else:
			key_name = r'SOFTWARE\Microsoft\VisualStudio\SxS\VC7'

		try:
			key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key_name)
		except WindowsError:
			raise Exception('Unable to open Visual Studio registry key. Application may not be installed.')

		if generator.startswith('Visual Studio 8'):
			value, valueType = _winreg.QueryValueEx(key, '8.0')
		elif generator.startswith('Visual Studio 9'):
			value, valueType = _winreg.QueryValueEx(key, '9.0')
		elif generator.startswith('Visual Studio 10'):
			value, valueType = _winreg.QueryValueEx(key, '10.0')
		else:
			raise Exception('Cannot determine vcvarsall.bat location for: ' + generator)

		# not sure why, but the value on 64-bit differs slightly to the original
		if os_bits == '64bit':
			path = value + r'vc\vcvarsall.bat'
		else:
			path = value + r'vcvarsall.bat'

		if not os.path.exists(path):
			raise Exception("'%s' not found." % path)

		return path

	def run_vcbuild(self, generator, mode, args=''):
		"""Build the VS solution via a generated batch file.

		Writes a .bat that calls vcvarsall.bat for the right architecture
		and then msbuild (VS10+) or vcbuild (older VS), since the
		environment vcvarsall sets up cannot be inherited directly.

		Fix: local `file` no longer shadows the builtin, and the handle is
		closed even if the write fails.
		"""
		import platform

		# os_bits should be loaded with '32bit' or '64bit'
		(os_bits, other) = platform.architecture()
		# Now we choose the parameters bases on OS 32/64 and our target 32/64
		# http://msdn.microsoft.com/en-us/library/x4d2c09s%28VS.80%29.aspx

		# valid options are only: ia64  amd64 x86_amd64 x86_ia64 
		# but calling vcvarsall.bat does not guarantee that it will work
		# ret code from vcvarsall.bat is always 0 so the only way of knowing that it worked is by analysing the text output
		# ms bug: install VS9, FeaturePack, VS9SP1 and you'll obtain a vcvarsall.bat that fails.
		if generator.find('Win64') != -1:
			# target = 64bit
			if os_bits == '32bit':
				vcvars_platform = 'x86_amd64' # 32bit OS building 64bit app
			else:
				vcvars_platform = 'amd64'  # 64bit OS building 64bit app
			config_platform = 'x64'
		else: # target = 32bit
			vcvars_platform = 'x86' # 32/64bit OS building 32bit app
			config_platform = 'Win32'
		if mode == 'release':
			config = 'Release'
		else:
			config = 'Debug'

		if generator.startswith('Visual Studio 10'):
			cmd = ('@echo off\n'
				'call "%s" %s \n'
				'msbuild /nologo %s /p:Configuration="%s" /p:Platform="%s" "%s"'
				) % (self.get_vcvarsall(generator), vcvars_platform, args, config, config_platform, self.sln_filepath())
		else:
			config = config + '|' + config_platform
			cmd = ('@echo off\n'
				'call "%s" %s \n'
				'vcbuild /nologo %s "%s" "%s"'
				) % (self.get_vcvarsall(generator), vcvars_platform, args, self.sln_filepath(), config)

		# Generate a batch file, since we can't use environment variables directly.
		temp_bat = self.getBuildDir() + r'\vcbuild.bat'
		batFile = open(temp_bat, 'w')
		try:
			batFile.write(cmd)
		finally:
			batFile.close()

		err = os.system(temp_bat)
		if err != 0:
			raise Exception('Microsoft compiler failed with error code: ' + str(err))

	def ensure_setup_latest(self):
		"""Re-run setup if the stored setup version is older than the current one."""
		if not self.min_setup_version(self.setup_version):
			self.setup()

	def reformat(self):
		"""Reformat all C++ sources with the bundled AStyle tool."""
		err = os.system(
			r'tool\astyle\AStyle.exe '
			'--quiet --suffix=none --style=java --indent=force-tab=4 --recursive '
			'lib/*.cpp lib/*.h cmd/*.cpp cmd/*.h')

		if err != 0:
			raise Exception('Reformat failed with error code: ' + str(err))

	def printGeneratorList(self):
		generators = self.get_generators()
		keys = generators.keys()
		keys.sort()
		for k in keys:
			print str(k) + ': ' + generators[k].cmakeName

	def getMacVersion(self):
		"""Return (major, minor) of the running Mac OS X, parsed from sw_vers."""
		import commands
		versions = commands.getoutput('/usr/bin/sw_vers')
		result = re.search('ProductVersion:\t(\d+)\.(\d+)', versions)

		if not result:
			# dump the raw output to help diagnose the parse failure
			print versions
			raise Exception(
				'Could not find Mac OS X version in sw_vers output.')

		major = int(result.group(1))
		minor = int(result.group(2))
		return (major, minor)

	def getMacPackageName(self):
		"""Return a package platform string like 'MacOSX106-i386'."""
		(major, minor) = self.getMacVersion()

		if major == 10:
			if minor <= 4:
				# 10.4 and earlier: intel and power pc
				arch = "Universal"
			elif minor <= 6:
				# 10.5-10.6: 32-bit intel
				arch = "i386"
			else:
				# 10.7+: 64-bit intel (gui only)
				arch = "x86_64"
		else:
			raise Exception("Mac OS major version unknown: " +
					str(major))

		# version is major and minor with no dots (e.g. 106)
		version = str(major) + str(minor)

		return "MacOSX%s-%s" % (version, arch)

	def reset(self):
		"""Remove all generated output directories, ignoring missing ones."""
		for staleDir in ('build', 'bin', 'lib', 'src/gui/tmp'):
			if os.path.exists(staleDir):
				shutil.rmtree(staleDir)
Пример #22
0
NUM_CLASSES = 21
input_shape = (300, 300, 3)
batch_size = 16

# SSD config
priors = pickle.load(open('prior_boxes_ssd300.pkl', 'rb'))
bbox_util = BBoxUtility(NUM_CLASSES, priors)

# Dataload
gt, train_keys, val_keys = load_data()

# Data generator
gen = Generator(gt,
                bbox_util,
                batch_size,
                path_prefix,
                train_keys,
                val_keys, (input_shape[0], input_shape[1]),
                do_crop=False)

# make model
model = SSD300(input_shape, num_classes=NUM_CLASSES)

weights_file_path = 'weights_SSD300.hdf5'
if os.path.exists(weights_file_path):
    model.load_weights(weights_file_path, by_name=True)

## Not Trainable layer settings
freeze = [
    'input_1', 'conv1_1', 'conv1_2', 'pool1', 'conv2_1', 'conv2_2', 'pool2',
    'conv3_1', 'conv3_2', 'conv3_3', 'pool3'
Пример #23
0
import sys

from generators import Generator
from plugins import load_plugins


def find_parser(url, args):
    """Return a parser from the first loaded plugin that can handle `url`."""
    for candidate in load_plugins():
        if candidate.can_handle(url):
            return candidate.get_parser(url, args)
    raise Exception("No plugin for URL: %s" % url)


def parse_args():
    """Read (url, extra_args) from sys.argv.

    extra_args is the list of arguments after the URL, or False when none
    were given (preserving the original truthy/falsy contract).
    """
    argv = sys.argv
    if len(argv) < 2:
        raise Exception("First argument: URL")
    url = argv[1]
    extra = len(argv) > 2 and argv[2:]
    return url, extra


if __name__ == '__main__':
    # CLI entry point: pick a plugin parser for the URL and emit XML to stdout
    url, args = parse_args()
    parser = find_parser(url, args)
    generator = Generator(parser)
    generator.write_xml(sys.stdout)
Пример #24
0
def generator_cover_features_sentiment_manager_sharpness_preprocessor():
    '''
    Returns pipelines main (83 fields), managers (1 field), photo_cover_stats (3 fields), photo_cover (3*100*300), ids (1 field), response (3 fields)

    Builds a Generator over a Preprocessor with several named pipelines
    ('origin', 'features', 'sentiment', 'photo_stats', 'main', 'response',
    'managers', 'photo_cover_stats', 'photo_cover', 'ids') joined on
    listing_id; the call order of with_pipeline/add_operation/set_consumer
    defines the dataflow, so statements must not be reordered.
    '''
    json_loader = l.JSONLoader()
    preprocessor = l.Preprocessor()
    # base listing fields plus derived date/price predictors
    preprocessor.with_pipeline('origin').set_loader(json_loader)
    preprocessor.add_operation(l.DateTimeExtractor()).add_operation(
        l.NewSimplePredictors())
    preprocessor.add_operation(
        l.LogTransform(['price_per_bedroom', 'price', 'price_per_bathroom']))
    preprocessor.add_operation(
        l.Selector([
            'listing_id', 'bathrooms', u'bedrooms', 'latitude', 'longitude',
            'price', 'month', 'day_of_month', 'hour', 'day_of_week',
            'price_per_bathroom', 'price_per_bedroom', 'num_features',
            'features_len', 'num_photos'
        ]))
    # left-join the per-listing feature/sentiment/photo tables onto origin
    merger = l.PandasColumnMerger(
        ['origin', 'features', 'sentiment', 'photo_stats'],
        on='listing_id',
        how='left')
    preprocessor.set_consumer(merger)
    preprocessor.with_pipeline('features').set_loader(
        l.CSVLoader('data/features_train.csv', 'data/features_test.csv'))
    preprocessor.set_consumer(merger)
    preprocessor.with_pipeline('sentiment').set_loader(
        l.CSVLoader('data/sentiment_train.csv', 'data/sentiment_test.csv'))
    preprocessor.set_consumer(merger)
    # combine per-photo sharpness stats with photo urls to pick a cover photo
    photo_url_merger = l.GetTopPhotoMerger('photo_stats_sharpness',
                                           'photo_stats_photo_url')
    preprocessor.with_pipeline('photo_stats_sharpness').set_loader(
        l.CSVLoader('data/images_train.csv', 'data/images_test.csv'))
    preprocessor.set_consumer(photo_url_merger)
    preprocessor.with_pipeline('photo_stats_photo_url').set_loader(
        json_loader.select_loader(['listing_id', 'photos']))
    preprocessor.set_consumer(photo_url_merger)
    preprocessor.with_pipeline('photo_stats').set_loader(photo_url_merger)
    preprocessor.add_operation(
        l.Selector(['listing_id', 'avg_width', 'avg_height', 'avg_sharpness']))
    preprocessor.add_operation(l.LogTransform(['avg_sharpness'
                                               ])).set_consumer(merger)
    # 'main': merged numeric matrix, standardized
    preprocessor.with_pipeline('main').set_loader(merger).add_operation(
        l.ColumnDrop('listing_id'))
    preprocessor.add_operation(l.ToNdarray()).add_operation(
        preprocessing.StandardScaler())
    # 'response': one-hot interest_level target (train only)
    preprocessor.with_pipeline('response').set_loader(
        json_loader.select_loader('interest_level'), only_train=True)
    preprocessor.add_operation(
        l.Dummifier(output_cols=['high', 'medium', 'low'])).add_operation(
            l.ToNdarray())
    # 'managers': categorical manager id capped at 999 categories
    preprocessor.with_pipeline('managers').set_loader(
        json_loader.select_loader('manager_id'))
    preprocessor.add_operation(l.CategoricalFilter(999)).add_operation(
        l.ToNdarray(dtype=np.int64, outshape=(-1, 1)))
    # cover-photo numeric stats, standardized
    preprocessor.with_pipeline('photo_cover_stats').set_loader(
        photo_url_merger)
    preprocessor.add_operation(
        l.Selector(['cover_width', 'cover_height', 'cover_sharpness']))
    preprocessor.add_operation(l.LogTransform(['cover_sharpness']))
    preprocessor.add_operation(l.ToNdarray()).add_operation(
        preprocessing.StandardScaler())
    # cover-photo image references, loaded lazily by PhotoLoaderGenerator
    preprocessor.with_pipeline('photo_cover').set_loader(photo_url_merger)
    preprocessor.add_operation(l.Selector(['listing_id', 'photo_name']))
    preprocessor.with_pipeline('ids').set_loader(
        json_loader.select_loader('listing_id'))
    preprocessor.add_operation(l.ToNdarray(dtype=np.int64))
    generator = Generator(preprocessor,
                          {'photo_cover': l.PhotoLoaderGenerator()},
                          l.SeparateKey('response'))
    return generator
Пример #25
0
    state_bneck_out = states_bneck[c][1]
    state_bneck_in = states_bneck[c + 1][0]
    states_clique = [s for s in states_cliques[c] if s != state_bneck_out]
    ENV.T[states_clique, state_bneck_out] = 1.
    ENV.T[state_bneck_out, state_bneck_in] = 1.
state_bneck_out = states_bneck[-1][1]
state_bneck_in = states_bneck[0][0]
states_clique = [s for s in states_cliques[-1] if s != state_bneck_out]
ENV.T[states_clique, state_bneck_out] = 1.
ENV.T[state_bneck_out, state_bneck_in] = 1.
ENV.T[(ENV.T < 1) & (ENV.T > 0)] = eps
ENV.T = row_norm(ENV.T)
ENV.__name__ += '-anticlockwise'

# %%
GEN = Generator(ENV=ENV, jump_rate=jump_rate)
PROPd = Propagator(GEN=GEN, tau=tau_diff, alpha=alpha_diff)
PROPs = Propagator(GEN=GEN, tau=tau_supdiff, alpha=alpha_supdiff)
PROPo = Propagator(GEN=GEN, tau=tau_diff, alpha=alpha_diff)
PROPo.min_zero_cf(lags=lags_opt, rho_init=rho_init)

print('DIFF: average autotransition prob = %0.3f' % np.diag(PROPd.etO).mean())
print('SUPDIFF: average autotransition prob = %0.3f' %
      np.diag(PROPs.etO).mean())

# %% SIMS
if run_explorer:
    exp_eff_s = []
    exp_eff_d = []
    exp_eff_o = []
    cov_visits_s = []
Пример #26
0
 def getGenerator(self, options):
     """Build a Generator wired to this object's signal-processing pipeline."""
     # NOTE(review): `options` is unused here - confirm against the interface
     return Generator.Generator(self.processSignal, self.processShortSignal,
                                self.signalPipeline)
Пример #27
0
    'state_lw': 0.5,
    'traj_width': 0.5,
    'traj_format': '-o',
    'color_time': True,
    'cmap_samp': 'husl',
    'cmap_traj': plt.cm.cool
}

# SAMPLE CROSS-CORRELOGRAMS
ENV = LinearTrack(scale=scale_track)
if start == 'default':
    ENV.start = ENV.start_center
else:
    ENV.start = start

GEN = Generator(ENV=ENV, symmetrize=symmetrize, jump_rate=jump_rate)
# generate trajectories under diffusion (alpha) and turbulence (+ spectral noise)
PROPt = Propagator(GEN=GEN,
                   sigma=sigma,
                   tau=tau,
                   alpha=alpha_turb,
                   spec_noise=spec_noise)
PROPd = Propagator(GEN=GEN,
                   sigma=sigma,
                   tau=tau,
                   alpha=alpha_diff,
                   spec_noise=0.)
PROPd.plot_prop_kernels(n=6)
EXPt = Explorer(PROP=PROPt, rho_init=ENV.start, mass=mass, no_dwell=no_dwell)
EXPd = Explorer(PROP=PROPd, rho_init=ENV.start, mass=mass, no_dwell=no_dwell)
EXPt.set_viz_scheme(**kwargs)
Пример #28
0
 def getGenerator(self, options):
     """Build a SumGenerator that combines signals via processSumSignals."""
     # NOTE(review): `options` is unused here - confirm against the interface
     return Generator.SumGenerator(self.processSignal,
                                   self.processShortSignal,
                                   self.innerPipeline, self.signalPipeline,
                                   self.processSumSignals)
Пример #29
0
    elif args.dec_type == 'gridrnn_dcnn':
        from decoders import gridrnn_Decoder as Decoder
    elif args.dec_type == 'hidden':
        from decoders import Hidden_Decoder as Decoder
    else:
        from decoders import gridrnn_Decoder as Decoder

    if args.d_type == 'dcgan':
        from discriminators import DCGAN_discriminator as Discriminator
    #elif args.d_type == 'hidden':
    else:
        from discriminators import Hidden_discriminator as Discriminator
    #else:
    #from discriminators import DCGAN_discriminator as Discriminator

    generator = Generator(args)
    decoder = Decoder(args)
    discriminator = Discriminator(args)

    if cuda:
        generator.cuda()
        discriminator.cuda()
        decoder.cuda()
        BCELoss.cuda()
        MSELoss.cuda()
    else:
        print('models', generator, discriminator, decoder)

    # Initialize weights
    generator.apply(weights_init_normal)
    discriminator.apply(weights_init_normal)
    img_path = args.img

    is_targeted = False
    if target in range(0, 10):
        is_targeted = True

    # load target_model
    f = getattr(target_models, model_name)(1, 10)
    checkpoint_path_f = os.path.join('saved', 'target_models',
                                     'best_%s_mnist.pth.tar' % (model_name))
    checkpoint_f = torch.load(checkpoint_path_f, map_location='cpu')
    f.load_state_dict(checkpoint_f["state_dict"])
    f.eval()

    # load corresponding generator
    G = Generator()
    checkpoint_name_G = '%s_target_%d.pth.tar' % (
        model_name,
        target) if is_targeted else '%s_untargeted.pth.tar' % (model_name)
    checkpoint_path_G = os.path.join('saved', 'generators',
                                     'bound_%.1f' % (thres), checkpoint_name_G)
    checkpoint_G = torch.load(checkpoint_path_G, map_location='cpu')
    G.load_state_dict(checkpoint_G['state_dict'])
    G.eval()

    # load img and preprocess as required by f and G
    orig = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
    img = orig.copy().astype(np.float32)
    img = img[None, None, :, :] / 255.0

    x = torch.from_numpy(img)