Example #1
def predict():
    gender = mp.transform_gender(request.args.get('gender'))
    married = mp.transform_married(request.args.get('married'))
    dependents = int(request.args.get('dependents'))
    education = mp.transform_education(request.args.get('education'))
    selfemployed = mp.transform_self_emp(request.args.get('selfemployed'))
    applicant_income = int(request.args.get('app_income'))
    co_applicant_income = int(request.args.get('co_income'))
    loan_amount = int(request.args.get('loan_amount'))
    loan_amt_term = int(request.args.get('loan_amt_term'))
    credit_hist = int(request.args.get('credit_hist'))
    prop_area = mp.transform_prop_area(request.args.get('prop_area'))

    # Load the trained model; a context manager avoids leaking the file handle.
    with open('finalModel', 'rb') as model_file:
        trained_model = pickle.load(model_file)
    result = trained_model.predict_proba([[
        gender, married, dependents, education, selfemployed, applicant_income,
        co_applicant_income, loan_amount, loan_amt_term, credit_hist,
        prop_area
    ]])
    status_false = result[0][0]
    status_true = result[0][1]
    if status_false > status_true:
        return "The Result of The Prediction: <b>Loan Status for this user is False with Probability " + str(
            status_false) + "</b>."
    else:
        return "The Result of The Prediction: <b>Loan Status for this user is True with Probability " + str(
            status_true) + "</b>."
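Assuming this handler is registered on a Flask route such as /predict (the route, port, and feature values below are illustrative assumptions, not taken from the source), it can be exercised with a plain GET request:

import requests

# Hypothetical query; the parameter names match the request.args keys above.
params = {
    'gender': 'Male', 'married': 'Yes', 'dependents': 0,
    'education': 'Graduate', 'selfemployed': 'No',
    'app_income': 5000, 'co_income': 1500, 'loan_amount': 120,
    'loan_amt_term': 360, 'credit_hist': 1, 'prop_area': 'Urban',
}
resp = requests.get('http://localhost:5000/predict', params=params)
print(resp.text)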
Example #2
def init_mapping(representation):
    ### load the mapping

    mapping = {}
    shared_mapping_obj = Mapping.get_mapping_instance(
        FLAGS.shared_mapping_class)

    for lang in representation.keys():
        if representation[lang] in ['phonetic', 'onehot_and_phonetic']:
            mapping[lang] = shared_mapping_obj
        elif representation[lang] == 'onehot_shared':
            mapping[lang] = shared_mapping_obj
        elif representation[lang] == 'onehot':
            mapping[lang] = Mapping.CharacterMapping()

        with open(FLAGS.mapping_dir + '/' + 'mapping_' + lang + '.json',
                  'r') as mapping_file:
            mapping[lang].load_mapping(mapping_file)

    ## Print Representation and Mappings
    print 'Mapping'
    print mapping

    print 'Vocabulary Statistics'
    for lang in representation.keys():
        print '{}: {}'.format(lang, mapping[lang].get_vocab_size())

    return mapping
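A minimal calling sketch, assuming FLAGS.shared_mapping_class and FLAGS.mapping_dir are already configured and per-language JSON mapping files exist (the language codes are hypothetical):

representation = {'hi': 'onehot', 'bn': 'onehot_shared'}  # hypothetical languages
mapping = init_mapping(representation)  # one mapping object per language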
Example #3
def createTower(liste, degats, element, vitesseA, currentTower, colorint, MAP,
                colonne, ligne, price, money, sprite):
    if currentTower != 0:
        cursor = pygame.Rect(pygame.mouse.get_pos()[0],
                             pygame.mouse.get_pos()[1], 1, 1)
        test = cursor.collidelist(Mapping.GetCaseRectList(colonne, ligne, MAP))
        if test != -1:
            if test < colonne:
                currCase = Mapping.GetCase(test, 0, MAP)
            else:
                currCase = Mapping.GetCase(
                    test - colonne * int(test / colonne), int(test / colonne),
                    MAP)
            if not currCase.IsPath and not currCase.HasTower:
                sprite = pygame.transform.scale(
                    sprite, (int(currCase.width), int(currCase.length)))
                newTower = tower(degats, element, vitesseA, colorint, sprite)
                newTower.setPos(currCase.innerRect)
                liste.append(newTower)
                currentTower = 0
                currCase.Lock()
                MAP[int(test /
                        colonne)][test -
                                  colonne * int(test / colonne)] = currCase
                money = money - price
    return currentTower, liste, MAP, money

Example #4
def pred_filter(infile1, infile2):

    mapped_pred = Mapping.mapper_pred(infile1)
    mapped_bench = Mapping.mapper_bench(infile2)

    outfile_name = infile1 + '_pred_set.txt'
    outfile_handle = open(outfile_name, 'w')

    bench = defaultdict()

    b_file = open(mapped_bench,'r')
    for lines in b_file:
        fields = lines.strip().split()
        bench[fields[0]] = 1
        
    b_file.close()
    prot_ann = defaultdict()

    print "Filtering prediction data....\n"

    prediction_file = open(mapped_pred,'r')
    for data in prediction_file:
        fields = data.strip().split()
        if fields[0] in bench:
            print >> outfile_handle, fields[0] + '\t' + fields[1] + '\t' + fields[2]
            prot_ann[fields[0]] = 1

    prediction_file.close()
    prot_ann.clear()
    bench.clear()

    return outfile_name, mapped_bench
Example #5
    def _command_line_mappings(self, mappings):
        if not mappings: return

        Mapping.create_mapping('command-line')
        for m in mappings:
            (model, device) = m.split(':')
            Mapping.bind(model, device)
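Each mapping string is expected to be a 'model:device' pair, as the split(':') shows. A hedged usage sketch (the instance and the names are invented):

mapper._command_line_mappings(['client:alix1', 'server:alix2'])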
Example #6
    def read_mapping(self, t="ASTAR", fname=None):
        if t == "ASTAR":
            self.mappings.append(Mapping(self))
            self.mappings[-1].read_astar_mapping(fname)
        elif t == "KEGG":
            self.mappings.append(Mapping(self))
            self.mappings[-1].read_kegg_mapping(fname)
        elif t == "ARITA":
            self.mappings.append(Mapping(self))
            self.mappings[-1].read_arita_mapping(fname)
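A hedged usage sketch (the owning object and file names are invented; the three format tags come from the branches above):

pathway.read_mapping(t="ASTAR", fname="glycolysis.astar")
pathway.read_mapping(t="KEGG", fname="glycolysis.kegg")
pathway.read_mapping(t="ARITA", fname="glycolysis.arita")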
Example #7
def get_seq2seq_loss(lang_pairs, parallel_data, seq_loss_op, batch_sequences,
                     batch_sequence_masks, batch_sequence_lengths,
                     batch_sequences_2, batch_sequence_masks_2,
                     batch_sequence_lengths_2, dropout_keep_prob,
                     prefix_tgtlang, prefix_srclang, mapping,
                     max_sequence_length):
    """
    convenience function to compute the translation loss (cross entropy) 

    lang_pairs: list of language pair tuples. 
    parallel_data: Dictionary of ParallelDataReader object for various language pairs                 

    seq_loss_op: dictionary of sequence loss operation for every language pair 
    batch_sequences, batch_sequence_masks, batch_sequence_lengths,
        batch_sequences_2, batch_sequence_masks_2, batch_sequence_lengths_2,
        dropout_keep_prob: placeholder variables 

    prefix_srclang: see commandline flags 
    prefix_tgtlang: see commandline flags 
    mapping: Dictionary of mapping objects for each language
    """

    ### start computation
    validation_loss = 0.0
    for src_lang, tgt_lang in lang_pairs:

        lang_pair = (src_lang, tgt_lang)

        sequences,sequence_masks,sequence_lengths,sequences_2,sequence_masks_2,sequence_lengths_2 = \
                    parallel_data[lang_pair].get_data()

        if prefix_tgtlang:
            sequences, sequence_masks, sequence_lengths = Mapping.prefix_sequence_with_token(
                sequences, sequence_masks, sequence_lengths, tgt_lang,
                mapping[tgt_lang])
        if prefix_srclang:
            sequences, sequence_masks, sequence_lengths = Mapping.prefix_sequence_with_token(
                sequences, sequence_masks, sequence_lengths, src_lang,
                mapping[src_lang])

        validation_loss += sess.run(seq_loss_op[lang_pair],
                                    feed_dict={
                                        batch_sequences: sequences,
                                        batch_sequence_masks: sequence_masks,
                                        batch_sequence_lengths:
                                        sequence_lengths,
                                        batch_sequences_2: sequences_2,
                                        batch_sequence_masks_2:
                                        sequence_masks_2,
                                        batch_sequence_lengths_2:
                                        sequence_lengths_2,
                                        dropout_keep_prob: 1.0
                                    })

    return validation_loss
Example #8
def kijiji(city, pages):

    listings = Mapping.listings(city, pages)

    condodata = []

    #Headers for condodata array
    condodata.append([
        'URL', 'Address', 'UnitType', 'AgreementType', 'Price', 'Date',
        'MoveInDate', 'SquareFeet', 'Bedrooms', 'Bathrooms', 'Furnished',
        'PostalCode', 'PostalTop', 'PostalBottom', 'Hydro', 'Heat', 'Water',
        'Cable', 'Internet', 'LaundryIn', 'LaundryOut', 'Parking', 'Landline',
        'AirConditioning', 'PetFriendly', 'Smoking', 'Yard', 'Balcony'
    ])

    # Pulls "market data" from each listing
    for post in listings:

        response = requests.get(post)

        soup = BeautifulSoup(response.text, 'html.parser')

        info = Retrieve.datapoints(soup)

        address = Retrieve.location(soup)

        print(address)

        postalcode = Retrieve.postal(address)

        price = Retrieve.price(soup)

        date = Retrieve.dateposted(soup)

        # dict.get reads each field with an 'NA' default, without inserting
        # missing keys into info.
        condodata.append([
            post, address,
            info.get(condodata[0][2], 'NA'),
            info.get(condodata[0][3], 'NA'), price, date,
            info.get(condodata[0][6], 'NA'),
            info.get(condodata[0][7], 'NA'),
            info.get(condodata[0][8], 'NA'),
            info.get(condodata[0][9], 'NA'),
            info.get(condodata[0][10], 'NA'), postalcode,
            postalcode[:3], postalcode[-3:],
            info.get(condodata[0][14], 'NA'),
            info.get(condodata[0][15], 'NA'),
            info.get(condodata[0][16], 'NA'),
            info.get(condodata[0][17], 'NA'),
            info.get(condodata[0][18], 'NA'),
            info.get(condodata[0][19], 'NA'),
            info.get(condodata[0][20], 'NA'),
            info.get(condodata[0][21], 'NA'),
            info.get(condodata[0][22], 'NA'),
            info.get(condodata[0][23], 'NA'),
            info.get(condodata[0][24], 'NA'),
            info.get(condodata[0][25], 'NA'),
            info.get(condodata[0][26], 'NA'),
            info.get(condodata[0][27], 'NA')
        ])
    return condodata
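A hedged usage sketch (the city slug and page count are guesses at the expected inputs):

import csv

rows = kijiji('city-of-toronto', 2)
with open('condodata.csv', 'w', newline='') as f:
    csv.writer(f).writerows(rows)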
Example #9
    def _combine_hosts(self):
        ct = ConfiguredTest()
        ct.resources = Resources.resources.resources()
        ct.hosts = {}
        ct.end_policy = Schedule.get_schedule().test_end_policy()
        ct.setup_phase_delay = Schedule.get_schedule().setup_phase_delay()
        ct.triggers = Schedule.get_schedule().triggers()

        for h in Model.get_model().hosts():
            host = ConfiguredHost()
            host.model = h
            host.device = h.bound()
            host.schedule = Schedule.get_schedule().host_schedule(h['name'])

            resources = set(h.needed_resources())
            for event in host.schedule:
                resources.update(event.command().needed_resources())

            def resolve_resource(r):
                if isinstance(r, str):
                    return Utils.resolve_resource_name(r)
                return r

            host.resources = set(map(resolve_resource, resources))

            ct.hosts[h['name']] = host

        ct.sanity_check()

        ct.model = Model.get_model()
        ct.laboratory = Laboratory.get_laboratory()
        ct.schedule = Schedule.get_schedule()
        ct.mapping = Mapping.get_mapping()

        self._configured_test = ct
Example #10
def predict():
	df= pd.read_csv("Job titles and industries.csv")
	# Features and Labels
	df['label'] = df['industry'].map({'IT': 0, 'Marketing': 1,'Education':2,'Accountancy':3})
	x = df['job title']
	y = df['label']
	
	# Extract features with TfidfVectorizer
	tfidf = TfidfVectorizer()
	x = tfidf.fit_transform(df["job title"]).toarray() # Fit the Data

	from sklearn.model_selection import train_test_split
	x_train,x_test,y_train,y_test = train_test_split(x,y,test_size=0.25)


	model=RandomForestClassifier(n_estimators=100)
	model.fit(x_train,y_train)

	jop_title=request.args.get('jop_title')
	data=mp.run(jop_title)
	vect=tfidf.transform([data]).toarray()
	result = model.predict(vect)

	if result[0]==0:
		return "The Result of The Prediction: <b> IT </b>"
	elif result[0]==1:
		return "The Result of The Prediction: <b> Marketing </b>"
	elif result[0]==2:
		return "The Result of The Prediction: <b> Education </b>"
	elif result[0]==3:
		return "The Result of The Prediction: <b> Accountancy </b>"
Example #11
def predict():
    type = mp.transform_type(request.args.get('type'))
    amount = float(request.args.get('amount'))
    old_bal_org = float(request.args.get('old_bal_org'))
    new_bal_org = float(request.args.get('new_bal_org'))
    name_dest = mp.transform_nameDest(request.args.get('name_dest'))
    old_bal_det = float(request.args.get('old_bal_det'))
    new_bal_det = float(request.args.get('new_bal_det'))

    # Load the trained model; a context manager avoids leaking the file handle.
    with open('finalModel', 'rb') as model_file:
        trained_model = pickle.load(model_file)
    result = trained_model.predict([[
        type, amount, old_bal_org, new_bal_org, name_dest, old_bal_det,
        new_bal_det
    ]])
    if result[0] == 1:
        return "The Result of The Prediction: <b> Take Care !! This Transaction is Fraud </b>."
    else:
        return "The Result of The Prediction: <b>Good !! This Transaction is not Fraud </b>."
Example #12
    def _sanity_check(self):
        if not Model.get_model():
            raise Exceptions.SanityError("No model defined. You need to create a model. Did you forget to use 'create_model(name)' in your configuration?")

        if not Laboratory.get_laboratory():
            raise Exceptions.SanityError("No laboratory defined. You need to create a laboratory. Did you forget to use 'create_laboratory(name)' in your configuration?")

        if not Mapping.get_mapping():
            raise Exceptions.SanityError("No mapping defined. You need to create a mapping. Did you forget to use 'create_mapping(name)' in your configuration?")

        if not Schedule.get_schedule():
            raise Exceptions.SanityError("No schedule defined. You need to create a schedule. Did you forget to use 'create_schedule(name)' in your configuration?")
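The error messages spell out the configuration calls this check expects. A minimal configuration sketch that would satisfy it (the names are illustrative; Example #5 shows create_mapping being called the same way):

create_model('two-host-model')
create_laboratory('local-lab')
create_mapping('command-line')
create_schedule('nightly-run')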
Example #13
##### B. Read in the structure

struc = Structure(options["-f"].value, strict=options["-strict"].value)

##### C. Set the mapping dictionary

# Convert force field tags to lower case
# Default is backmapping from MARTINI to GROMOS53A6
# If to_ff == martini, default from_ff = gromos
to_ff = options["-to"] and options["-to"].value.lower() or "gromos"
if to_ff == "martini" and not options["-from"]:
    from_ff = "gromos"
else:
    from_ff = options["-from"] and options["-from"].value.lower() or "martini"
mapping = Mapping.Mapping(options["-mapdir"].value).get(source=from_ff,
                                                        target=to_ff)
backmapping = levels[from_ff] > levels[to_ff]
reslist = mapping.keys()
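The repeated "a and a.value.lower() or default" construction is the pre-ternary default idiom; assuming options["-to"] is falsy when the flag was not given, it is equivalent to the line below (with the classic caveat that the and/or form also falls back to the default when the value itself is falsy, e.g. an empty string):

to_ff = options["-to"].value.lower() if options["-to"] else "gromos"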

##### D. Iterate over atoms to write out, based on residue names

# Copy the residue list from the target topology
# This gives a list we can pop from, while keeping
# the original.
# The solvent residues are skipped.
topresidues = None
if top:
    topresidues = [i for i in top.residues]
    print(options["-atomlist"], options["-atomlist"].value)
    if options["-atomlist"]:
        atm = open(options["-atomlist"].value, "w")
Example #14
def run_print_vars(args):
    """
    Debugging: print the names of the variables in the model
    """

    ## check for required parameters
    if args.lang is None:
        print 'ERROR: --lang has to be set'
        sys.exit(1)

    if args.model_fname is None:
        print 'ERROR: --model_fname has to be set'
        sys.exit(1)

    if args.mapping_fname is None:
        print 'ERROR: --mapping_fname has to be set'
        sys.exit(1)

    #######################################
    # Reading data and creating mappings  #
    #######################################

    # Creating mapping object to store char-id mappings
    mapping = Mapping.get_mapping_instance(args.mapping_class)
    with open(args.mapping_fname, 'r') as mapping_json_file:
        mapping.load_mapping(mapping_json_file)
    print 'Mapping'
    print mapping

    print 'Vocabulary Statistics'
    print '{}: {}'.format(args.lang, mapping.get_vocab_size())

    print 'Finished Reading Data'

    ###################################################################
    #    Interacting with model and creating computation graph        #
    ###################################################################

    # Creating Model object
    model = LanguageModel(args.lang, mapping, args.representation,
                          args.max_seq_length, args.embedding_size,
                          args.rnn_size)

    for v in tf.all_variables():
        print v.name

    ## Creating placeholder for sequences, masks and lengths and dropout keep probability
    pl_batch_sequences = tf.placeholder(shape=[None, args.max_seq_length],
                                        dtype=tf.int32)
    pl_batch_sequence_lengths = tf.placeholder(shape=[None], dtype=tf.float32)
    loss_op = model.average_loss(pl_batch_sequences, pl_batch_sequence_lengths,
                                 1.0)

    #Saving model
    saver = tf.train.Saver(max_to_keep=0)

    #Start Session
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
    #sess.run(tf.initialize_all_variables())
    saver.restore(sess, args.model_fname)

    print 'after loading'

    for v in tf.all_variables():
        print v.name
Example #15
def run_test(args):
    """
    Test a trained language model
    """

    ## check for required parameters
    if args.lang is None:
        print 'ERROR: --lang has to be set'
        sys.exit(1)

    if args.model_fname is None:
        print 'ERROR: --model_fname has to be set'
        sys.exit(1)

    if args.mapping_fname is None:
        print 'ERROR: --mapping_fname has to be set'
        sys.exit(1)

    if args.in_fname is None:
        print 'ERROR: --in_fname has to be set'
        sys.exit(1)

    #######################################
    # Reading data and creating mappings  #
    #######################################

    # Creating mapping object to store char-id mappings
    mapping = Mapping.get_mapping_instance(args.mapping_class)
    with open(args.mapping_fname, 'r') as mapping_json_file:
        mapping.load_mapping(mapping_json_file)
    print 'Mapping'
    print mapping

    print 'Vocabulary Statistics'
    print '{}: {}'.format(args.lang, mapping.get_vocab_size())

    ## Reading test data
    test_data = MonoDataReader.MonoDataReader(args.lang, args.in_fname,
                                              mapping, args.max_seq_length)

    print 'Finished Reading Data'

    ###################################################################
    #    Interacting with model and creating computation graph        #
    ###################################################################

    # Creating Model object
    model = LanguageModel(args.lang, mapping, args.representation,
                          args.max_seq_length, args.embedding_size,
                          args.rnn_size)

    ## Creating placeholder for sequences, masks and lengths and dropout keep probability
    pl_batch_sequences = tf.placeholder(shape=[None, args.max_seq_length],
                                        dtype=tf.int32)
    pl_batch_sequence_lengths = tf.placeholder(shape=[None], dtype=tf.float32)
    loss_op = model.average_loss(pl_batch_sequences, pl_batch_sequence_lengths,
                                 1.0)

    #Saving model
    saver = tf.train.Saver(max_to_keep=0)

    #Start Session
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
    #sess.run(tf.initialize_all_variables())
    saver.restore(sess, args.model_fname)

    print "Session started"

    ## TEST LOSS
    test_start_time = time.time()
    test_loss = get_average_loss(test_data, loss_op, pl_batch_sequences,
                                 pl_batch_sequence_lengths, sess)
    test_end_time = time.time()
    epoch_test_time = (test_end_time - test_start_time)

    print "Test Perplexity: {}".format(test_loss)
    print "Test Time (hh:mm:ss)::: {}".format(
        utilities.formatted_timeinterval(epoch_test_time))
Example #16
def convert_address_to_latlng(address_string):
	return Mapping.address_to_geo(address_string)
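A hedged usage sketch (the address is invented; the return shape depends on Mapping.address_to_geo):

latlng = convert_address_to_latlng('10 Downing Street, London')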
Example #17
##### B. Read in the structure

struc = Structure(options["-f"].value, strict=options["-strict"].value)

##### C. Set the mapping dictionary

# Convert force field tags to lower case
# Default is backmapping from MARTINI to GROMOS53A6
# If to_ff == martini, default from_ff = gromos
to_ff = options["-to"] and options["-to"].value.lower() or "gromos"
if to_ff == "martini" and not options["-from"]:
    from_ff = "gromos"
else:
    from_ff = options["-from"] and options["-from"].value.lower() or "martini"
mapping = Mapping.get(source=from_ff, target=to_ff)
backmapping = levels[from_ff] > levels[to_ff]
reslist = mapping.keys()

##### D. Iterate over atoms to write out, based on residue names

# Copy the residue list from the target topology
# This gives a list we can pop from, while keeping
# the original.
# The solvent residues are skipped.
topresidues = None
if top:
    topresidues = [i for i in top.residues]
    if options["-atomlist"]:
        atm = open(options["-atomlist"].value, "w")
        topatm = [j for i in topresidues for j in i]
Example #18
    ### parse representation argument
    if args.representation in ['onehot', 'onehot_shared', 'phonetic', 'onehot_and_phonetic']:
        representation = {}
        for lang in lang_pair:
            representation[lang] = args.representation
    else:
        representation = dict([x.split(':') for x in args.representation.split(',')])

    ## Print representation
    print 'Representation'
    print representation

    ### load the mapping
    mapping = {}
    shared_mapping_obj = Mapping.get_mapping_instance(shared_mapping_class)

    for lang in representation.keys():
        if representation[lang] in ['phonetic', 'onehot_and_phonetic']:
            mapping[lang] = shared_mapping_obj
        elif representation[lang] == 'onehot_shared':
            mapping[lang] = shared_mapping_obj
        elif representation[lang] == 'onehot':
            mapping[lang] = Mapping.CharacterMapping()

        with open(mapping_dir + '/' + 'mapping_' + lang + '.json', 'r') as mapping_file:
            mapping[lang].load_mapping(mapping_file)

    ## Print mappings
    print 'Mapping'
    print mapping
Example #19
def FitTOD(tod,
           ra,
           dec,
           obs,
           clon,
           clat,
           cpang,
           prefix='',
           destripe=False,
           mode='mode1',
           justOffsets=True,
           doPlots=True):
    # Beam solid angle taken from James' report on wiki Optics
    nubeam = np.array([26., 33., 40.])
    srbeam = np.array([2.1842e-6, 1.6771e-6, 1.4828e-6])
    pmdl = interp1d(nubeam, srbeam)  # straight interpolation

    nHorns = tod.shape[0]
    nSidebands = tod.shape[1]
    nChans = tod.shape[2]
    nSamps = tod.shape[3]

    if mode == 'mode1':
        nParams = 5
    elif mode == 'mode2':
        nParams = 6
    else:
        nParams = 0
        print('WARNING: No fitting method selected')

    # Define the pixel grid
    # Pixel coordinates on sky
    wcs, xr, yr = Mapping.DefineWCS(naxis, cdelt, crval)
    r = np.sqrt((xr)**2 + (yr)**2)

    # Pixel coordinates in image
    xpix, ypix = np.meshgrid(np.arange(xr.shape[0]),
                             np.arange(yr.shape[1]),
                             indexing='ij')
    backgroundPixels = np.sqrt((xpix - xr.shape[0] / 2.)**2 +
                               (ypix - xr.shape[1] / 2.)**2) > xr.shape[0] / 3

    # Calculate RMS from adjacent pairs
    rms = CalcRMS(tod)

    # Set up data containers
    crossings = np.zeros(nHorns)  # Crossing points
    time = np.arange(nSamps)  # Useful for interpolation

    # Output containers for fit parameters and uncertainties
    P1 = np.zeros((nHorns, nSidebands, nChans, nParams - 2))
    errors = np.zeros((nHorns, nSidebands, nChans, nParams - 2))
    Pestout = np.zeros((nHorns, nParams))
    Pstdout = np.zeros((nHorns, nParams))

    #fig = pyplot.figure(figsize=(16,16))
    for i in range(nHorns):

        # Rotate the RA/DEC to the source centre
        x, y = Pointing.Rotate(ra[i, :], dec[i, :], clon, clat, -cpang[i, :])

        r = np.sqrt((x)**2 + (y)**2)
        close = (r < 12.5 / 60.)
        if np.sum((r < 6. / 60.)) < 10:
            print('Source not observed')
            continue

        # Average the data into a single timestream
        print(tod.shape)
        if tod.shape[2] == 1:
            todTemp = tod[0, 0, 0, :]
        else:
            todTemp = np.mean(
                np.mean(tod[i, :, :], axis=0),
                axis=0)  # average all data to get location of peak in data:

        removeNaN(todTemp)

        rmsTemp = CalcRMS(todTemp)

        try:
            todBackground = RemoveBackground(todTemp,
                                             rmsTemp,
                                             close,
                                             sampleRate=50,
                                             cutoff=1.)
            todTemp -= todBackground
        except (ValueError, IndexError):
            todTemp -= np.median(todTemp)

        offsets = 0
        print(crval)
        m, hits = Mapping.MakeMapSimple(todTemp, x, y, wcs)
        resid = todTemp[:todTemp.size // 2 * 2:2] - todTemp[1:todTemp.size //
                                                            2 * 2:2]
        residmap, rh = Mapping.MakeMapSimple(resid,
                                             x[:(todTemp.size // 2) * 2:2],
                                             y[:(todTemp.size // 2) * 2:2],
                                             wcs)
        m = m / hits
        residmap = residmap / rh
        mapNoise = np.nanstd(residmap) / np.sqrt(2)

        m -= np.nanmedian(m)
        m[np.isnan(m)] = 0.

        ipix = Mapping.ang2pixWCS(wcs, x, y).astype('int')
        gd = (np.isnan(ipix) == False) & (ipix >= 0) & (ipix < m.size)
        # Get an estimate of the peak location
        x0, y0, xpix0, ypix0 = ImagePeaks(m, xr, yr, mapNoise)

        if x0 is None:
            print('No peak found')
            continue

        # Just select the near data and updated peak location
        r = np.sqrt((x - x0)**2 + (y - y0)**2)
        close = (r < 12.5 / 60.)
        near = (r < 25. / 60.) & (r > 15 / 60.)
        far = (r > 30. / 60.)
        fitselect = (r < 10. / 60.) & (np.isnan(todTemp) == False)
        plotselect = (r < 45. / 60.)

        if np.sum(fitselect) < 20:
            continue

        fitdata = todTemp[fitselect]
        fitra = x[fitselect]
        fitdec = y[fitselect]

        # NOTE: an initial guess is only constructed here for mode == 'mode2';
        # with mode == 'mode1', P0 would be undefined at the fit below.
        if mode == 'mode2':
            P0 = [
                np.max(fitdata) - np.median(fitdata), 4. / 60. / 2.355,
                4. / 60. / 2.355, x0, y0,
                np.median(fitdata)
            ]

        print(P0, lnprior(P0))
        fout = leastsq(ErrorLstSq,
                       P0,
                       args=(fitra, fitdec, fitdata, 0, 0),
                       full_output=True)
        #P0 = fout[0]
        ndim, nwalkers = len(P0), 100

        #pos = np.zeros((nwalkers, ndim))
        #pos[:,0] = np.abs(P0[0])*1e-4*np.random.randn(nwalkers)
        #pos[:,1:3] = np.abs(P0[1:3])[np.newaxis,:]*1e-4*np.random.randn((nwalkers,2))
        ###pos[:,3:5] = np.abs(P0[3:5])[np.newaxis,:]+0.1*np.random.randn((nwalkers,2))
        #pos[:,5] =  np.abs(P0[5])*1e-4*np.random.randn(nwalkers)
        #pos = pos.T

        pos = [
            np.array(P0) + 1e-4 * np.random.randn(ndim)
            for iwalker in range(nwalkers)
        ]
        sampler = emcee.EnsembleSampler(nwalkers,
                                        ndim,
                                        lnprob,
                                        args=(fitra, fitdec, fitdata, rmsTemp))
        sampler.run_mcmc(pos, 1200)
        samples = sampler.chain[:, 500:sampler.chain.shape[1]:3, :].reshape(
            (-1, ndim))
        print(samples.shape)
        Pest = np.mean(samples, axis=0)
        Pstd = np.std(samples, axis=0)
        #Pest = fout[0]
        chi2 = np.sum((fitdata - Gauss2d2FWHM(Pest, fitra, fitdec, 0, 0))**2 /
                      rmsTemp**2) / (fitdata.size - len(Pest))
        print(np.std(samples, axis=0))
        if doPlots:
            pyplot.plot(fitdata, label='data')
            pyplot.plot(Gauss2d2FWHM(Pest, fitra, fitdec, 0, 0), label='fit')
            pyplot.legend(loc='upper right')
            pyplot.ylabel('T (K)')
            pyplot.xlabel('Sample')
            pyplot.text(0.05,
                        0.9,
                        r'$\chi^2$=' + '{:.2f}'.format(chi2),
                        transform=pyplot.gca().transAxes)
            pyplot.title('Horn {:d} obs {}'.format(i + 1, prefix))
            pyplot.savefig('PeakFitPlots/PeakFits_Horn{:d}_{}.png'.format(
                i + 1, prefix),
                           bbox_inches='tight')
            pyplot.clf()
            fig = corner.corner(samples)
            pyplot.title('Horn {:d} obs {}'.format(i + 1, prefix))
            pyplot.savefig('PeakFitPlots/Corner_Horn{:d}_{}.png'.format(
                i + 1, prefix),
                           bbox_inches='tight')
            pyplot.clf()
            del fig
    # pyplot.figure()
    #pyplot.plot(samples[:,0])
    #pyplot.show()
        if justOffsets:
            #P1[i,:] = Pest
            #P1e[i,:] = np.std(samples, axis=0)
            Pestout[i, :] = Pest
            Pstdout[i, :] = Pstd
            #chis[i] = chi2
            continue
        else:
            Pestout[i, :] = Pest
            Pstdout[i, :] = Pstd
        print(x0, y0)
        siga, sigb = Pest[1:3]
        x0, y0 = Pest[3:5]
        print(x0, y0)

        for j in range(nSidebands):

            for k in range(nChans):

                try:
                    todBackground = RemoveBackground(tod[i, j, k, :],
                                                     rms[i, j, k],
                                                     close,
                                                     sampleRate=50,
                                                     cutoff=0.1)
                except (IndexError, ValueError):
                    todBackground = np.median(tod[i, j, k, :])

                tod[i, j, k, :] -= todBackground
                fitdata = tod[i, j, k, fitselect]
                fitra = x[fitselect]
                fitdec = y[fitselect]

                amax = np.argmax(fitdata)

                if mode == 'mode1':
                    P0 = [
                        np.max(fitdata) - np.median(fitdata), 4. / 60. / 2.355,
                        x0, y0,
                        np.median(fitdata)
                    ]
                    fout = leastsq(ErrorLstSq,
                                   P0,
                                   args=(fitra, fitdec, fitdata, 0, 0),
                                   full_output=True)
                    fbootmean, fbootstd = Bootstrap(P0, fitdata, fitra, fitdec,
                                                    ErrorLstSq)
                    fitModel = Gauss2d
                elif mode == 'mode2':
                    P0 = [
                        np.max(fitdata) - np.median(fitdata), Pest[1], Pest[2],
                        np.median(fitdata)
                    ]
                    fout = leastsq(ErrorLstSq2FWHM,
                                   P0,
                                   args=(fitra, fitdec, fitdata, x0, y0),
                                   full_output=True)
                    fbootmean, fbootstd = Bootstrap(P0, fitdata, fitra, fitdec,
                                                    ErrorLstSq2FWHM)
                    fitModel = Gauss2d2FWHMFixed
                else:
                    print('Warning: No fitting method selected')

                if fout[1] is None:
                    continue

                #fout = np.mean(samples,axis=0),
                P1[i, j, k, :] = fout[0]
                errors[i, j, k, :] = fbootstd
                #pyplot.plot(fitdata-fitModel(fout[0], fitra, fitdec, x0,y0), label='data')
                #pyplot.legend(loc='upper right')
                #pyplot.ylabel('T (K)')
                #pyplot.xlabel('Sample')
                #pyplot.text(0.05,0.9, r'$\chi^2$='+'{:.2f}'.format(chi2), transform=pyplot.gca().transAxes)
                #pyplot.title('Horn {:d} obs {}'.format(i+1, prefix))
                #pyplot.show()

            #pyplot.errorbar(np.arange(P1.shape[2]), P1[i,j,:,1]*60, yerr=errors[i,j,:,1]*60)
            #pyplot.show()

        cross = np.argmax(
            fitModel(np.median(P1[i, 0, :, :], axis=0), x, y, x0, y0))
        print(cross, nHorns)
        crossings[i] = cross
    if justOffsets:
        return P1, errors, Pestout, Pstdout, crossings  #  P1, P1e, chis
    else:
        return P1, errors, Pestout, Pstdout, crossings
Example #20
    initTemp = pygame.time.get_ticks()
    seconde = int(initTemp / 1000)
    return seconde


couleurFond = (176, 224, 230)
X = pygame.display.Info().current_w
Y = pygame.display.Info().current_h

events = pygame.event.get()

fenetre = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)

plyManager = Player()

Mapping.MAP = Mapping.InitMap(fenetre, Mapping.MAP)

seconde = 0

Jeu = True

while Jeu:
    events = pygame.event.get()
    fenetre.fill(couleurFond)
    seconde = Time(seconde)
    if not plyManager.IsPlaying:
        #DimissPopup
        menu.activesPop = menu.DimissPopup(menu.activesPop, events)
        #drawMap
        Mapping.GRID, Mapping.caseWid, Mapping.caseLen = Mapping.drawMap(
            X, Y, Mapping.nbreColonne, Mapping.nbreLigne, fenetre)
Example #21
                "(but not in a two-color way)")
        parser.add_option('-m', dest='map', default=False,
                help="Input the map to estimate positions")
        parser.add_option('-o', dest='other', default='655')
        (opts, args) = parser.parse_args()

        flagdict = defaultdict((lambda : 'ERR'), {'1': '655', '2':'585'})
        outfile = None
        framesets = []
        markernums = defaultdict(int)
        print glob(args[0] + '*')
        filenum = 0
        
        if opts.map is not False:
            import Mapping
            mapping = Mapping.loadmapping(opts.map)

        for fname in glob(args[0] + '*'):
            XMLTree = minidom.parse(fname)
            

            # Read out frame numbers from the file name
            if 'frame' in fname:
                frames = fname.split('frame')[1].rsplit('.xml')[0]
                startframe, endframe = map(int, frames.split('-'))
                framesets.append((startframe, endframe))
            else:
                framenums = map(int, getAllData(XMLTree, 'MarkerZ'))
                startframe = min(framenums)
                endframe = max(framenums)
                framesets = getFrameSets(XMLTree, opts.color)
Example #22
def workflow(args : list):
    """Main workflow of backward.py
    
    Parameters
    ----------
    args : List(Str)
        Commandline arguments
    """    
    global options
    options = [
#   option           type         number     default   description
    ("-f",        Option(str,           1,         None, "Input  GRO/PDB structure")),
    ("-o",        Option(str,           1,         None, "Output GRO/PDB structure")),
    ("-raw",      Option(str,           1,         None, "Projected structure before geometric modifications")),
#    ("-c",        Option(str,           1,         None, "Output GRO/PDB structure of expanded CG beads for position restraints")),
    ("-n",        Option(str,           1,         None, "Output NDX index file with default groups")),
    ("-p",        Option(str,           1,         None, "Atomistic target topology")),
    ("-po",       Option(str,           1,         None, "Output target topology with matching molecules list")), 
    ("-pp",       Option(str,           1,         None, "Processed target topology, with resolved #includes")), 
    ("-atomlist", Option(str,           1,         None, "Atomlist according to target topology")),
    ("-fc",       Option(float,         1,          200, "Position restraint force constant")),
    ("-to",       Option(str,           1,         None, "Output force field")),
    ("-from",     Option(str,           1,         None, "Input force field")),
    ("-strict",   Option(bool,          0,         None, "Use strict format for PDB files")),
    ("-nt",       Option(bool,          0,         None, "Use neutral termini for proteins")),
    ("-sol",      Option(bool,          0,         None, "Write water")),
    ("-solname",  Option(str,           1,        "SOL", "Residue name for solvent molecules")),
    ("-kick",     Option(float,         1,            0, "Random kick added to output atom positions")),
    ("-nopbc",    Option(bool,          0,         None, "Don't try to unbreak residues (like when having large residues in a small box)")),
    ("-mapdir",   Option(str,           1,         None, "Directory where to look for the mapping files")),
    ]

    if '-h' in args or '--help' in args:
        print("\n",__file__)
        print(desc or "\nSomeone ought to write a description for this script...\n")
        for thing in options:
            print(type(thing) != str and "%10s  %s"%(thing[0],thing[1].description) or thing)
        print()
        sys.exit()


    # Convert the option list to a dictionary, discarding all comments
    options = dict([i for i in options if not type(i) == str])


    # Process the command line - list the options that were given
    opts = [] 
    while args:
        opts.append(args.pop(0))
        options[opts[-1]].setvalue([args.pop(0) for i in range(options[opts[-1]].num)])


    ## DONE PARSING ARGUMENTS ##
        

    ################################################################################

    ## MAPPING ##


    ##### A. If a target topology was provided, read it in


    top = options["-p"] and Topology(options["-p"].value,out=options["-pp"].value)


    ##### B. Read in the structure


    struc = Structure(options["-f"].value,strict=options["-strict"].value)


    ##### C. Set the mapping dictionary


    # Convert force field tags to lower case 
    # Default is backmapping from MARTINI to GROMOS53A6
    # If to_ff == martini, default from_ff = gromos
    to_ff = options["-to"] and options["-to"].value.lower() or "gromos"
    if to_ff == "martini" and not options["-from"]:
        from_ff = "gromos"
    else:
        from_ff     = options["-from"] and options["-from"].value.lower() or "martini"
    mapping     = Mapping.Mapping(options["-mapdir"].value).get(source=from_ff,target=to_ff)
    backmapping = levels[from_ff] > levels[to_ff]
    reslist     = mapping.keys()


    ##### D. Iterate over atoms to write out, based on residue names


    # Copy the residue list from the target topology
    # This gives a list we can pop from, while keeping
    # the original.
    # The solvent residues are skipped.
    topresidues = None
    if top:
        topresidues = [i for i in top.residues]
        if options["-atomlist"]:
            atm    = open(options["-atomlist"].value,"w")
            topatm = [j for i in topresidues for j in i]
            atm.writelines("".join(["%6d %5s %5s\n"%(u,v[0],v[1]) for u,v in zip(range(1,len(topatm)+1),topatm)]))


    # Iterate over residues
    # If we are backmapping, we store the BB bead
    # positions, to generate a spline, which we use
    # afterwards to place the backbone atoms.
    # To set the positions, we use some bookkeeping
    # tricks for the atoms to place on, or relative
    # to, the spline.
    # The backbone list will end up being equal in 
    # length to the number of (amino acid) residues.
    # It is processed afterwards to be three times
    # the length. Indices are used to indicate which
    # entry from the resulting interpolated spline 
    # list need to be taken for the position, and an
    # offset (tuple) is added to control the placement
    # of hydrogens and oxygens to N/C.
    counter  =  0
    out      = []
    cg       = []
    raw      = []
    sol      = []
    ions     = []
    msgs     = []
    for residue,bb,nterm,cterm in zip(struc.residues,struc.backbone,struc.nterm,struc.cterm):


        counter += 1


        # Ignore solvent molecules from the topology
        while topresidues and topresidues[0][0][1] in solvent_stuff:
            topresidues.pop(0)


        # Unpack first atom
        first, resn, resi, chain, x, y, z = residue[0]


        # Extract residue name and atom list
        resn  = resn.strip()
        atoms = [i[0].strip() for i in residue]


        # Just read one residue from the CG structure
        # If we have a topology, we need to check whether
        # the residue we just read matches the next in the 
        # topology. Several cases are possible:
        #
        #   - The residuename is equal in both cases:
        #     This is too easy! Just proceed and thank your deity.
        #
        #   - The residues do not match, but the CG residuename 
        #     matches the AA moleculetype name:
        #     This may happen with lipids, if the atomistic structure
        #     is split in residues like in the De Vries model.
        #     In this case, all residues corresponding to the molecule
        #     need to be read from the topology, based on the chain 
        #     identifier.
        #
        #   - The residuename does not match, but the residue does:
        #     The residues should match, or at least the first 
        #     characters.      
        #
        #   - The residue does not match with either the residue or 
        #     moleculetype from the atomistic topology:
        #     If the residue is solvent, then we leave the topology
        #     untouched, and the atoms are generated based on the 
        #     mapping.


        # Check for solvent
        if resn in solvent.keys():
            cx, cy, cz = residue[0][4:7]
            for atom, x, y, z in solvent[resn]:
                # Should add random rotation
                if atom in ion_stuff:
                    atom = atoms[0]
                    ions.append((atom,resn,counter,chain,cx+x,cy+y,cz+z)) 
                    # They are added at the end, which is safe, as the
                    # ion position is taken from the CG bead position
                    # anyway.
                else:
                    # If we do not want solvent written then this is 
                    # a good time to break: the first atom of a stretch
                    # of solvent. Note that this ensures that we write 
                    # ions if we have those.
                    if not options["-sol"]:
                        break
                    resn = options["-solname"].value
                    # Increase the counter if we have an oxygen.
                    # A little hack to keep track of water molecules
                    if atom[0] == "O":
                        counter += 1
                    sol.append((atom,resn,counter,chain,cx+x,cy+y,cz+z)) 
            # Go to next residue
            continue


        # Read a residue from the target topology if we have one
        # Read several if the mapping so requires
        # Make an atom list from the residues read
        if topresidues:
            # Check whether the CG residue corresponds to the next AA residue
            # or to the next moleculetype 
            topres = topresidues.pop(0)
            if resn != topres[0][1] and resn == topres[0][7]:
                # Add residues based on chain id
                while topresidues and topresidues[0][0][3] == topres[0][3]:
                    topres.extend(topresidues.pop(0))
            topres = [i for j in range(mapnum.get(resn,1)) for i in topres]
            # Set the residue name to the moleculetype name
            topres[0][3] = topres[0][7]
            target = list(zip(*topres))[0]
            # Check for duplicate atom names
            if not len(target) == len(set(target)):
                print("The target list for residue %s contains duplicate names. Relying on mapping file."%resn)
                target = None
        else:
            target = None


        # Except for solvent, the residue name from a topology 
        # takes precedence over the one from the structure.
        if top and topres[0][1] in mapping.keys():
            resn = topres[0][1]

        
        # Check if the residue is in the list
        # or whether we have an ambiguity.
        # In that case the first part of the 
        # residue proper is equal to what we have
        # and the atom lists should be equal
        if not resn in reslist:
            oldname = resn
            p = set(atoms)
            for i in reslist:
                if i.startswith(resn):
                    if p == set([k for j in mapping[i].map.values() for k in j]):
                        msg="Residue %s not found. Seems to match %s."%(resn,i)
                        if not msg in msgs:
                            print(msg)
                            msgs.append(msg)
                        resn = i
                        break
            if resn == oldname:
                # Last resort ... Checking for partially matching atom lists
                for i in reslist:
                    if i.startswith(resn):
                        keys = list(mapping[i].map.values()) + [mapping[i].prekeys]
                        if p.issubset(set([k for j in keys for k in j])):
                            msg="Residue %s not found. Seems to match %s."%(resn,i)
                            if not msg in msgs:
                                print(msg)
                                msgs.append(msg)
                            resn = i
                            break
            

        if not resn in mapping.keys():
            # If the residue is still not in the mapping list
            # then there is no other choice that to bail out
            raise ValueError("Residue not found in mapping dictionary: %s\n"%resn)
            
    
        o, r = mapping[resn].do(residue,target,bb,nterm,cterm,options["-nt"])
        out.extend(o)
        raw.extend(r)


    ## Write out

    # Combine things

    out.extend(sol)
    out.extend(ions)
    raw.extend(sol)
    raw.extend(ions)

    # Write out

    # Title
    if backmapping:
        title = "Backmapped structure from MARTINI to %s\n"%options["-to"].value
    else:
        title = "Mapped structure from %s to MARTINI\n"%options["-from"].value

    write_gro(options["-o"].value, title, out, struc.groBoxString(), options["-kick"].value)
    if options["-raw"]:
        write_gro(options["-raw"].value, "Projected structure before modifications\n",
                raw, struc.groBoxString(), 0)

    ## Write the output topology
    if options["-p"] and options["-po"]:
        write_topology(options["-po"].value, options["-p"].value, sol, ions)
    

    ## Write an index file
    if options["-n"]:
        write_ndx(options["-n"].value, out, protein_stuff, solvent_stuff)
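A hedged invocation sketch (file names and force-field tags are illustrative; the flag/value pairing follows the option table above):

workflow(['-f', 'cg.gro', '-p', 'topol.top', '-o', 'aa.gro',
          '-to', 'gromos', '-mapdir', 'Mapping'])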
Example #23
            minRa = np.min(ra)
            maxRa = np.max(ra)
            minDec = np.min(dec)
            maxDec = np.max(dec)

            if (crval[0] < minRa) | (crval[0] > maxRa) | (
                    crval[1] < minDec) | (crval[1] > maxDec):
                print(
                    'WARNING: MAP CENTRE DOES NOT MATCH TELESCOPE POINTING CENTRE. CHECK COORDINATES'
                )
                print('MEAN RA: {:.2f}, MEAN DEC: {:.2f}'.format(
                    np.mean(ra), np.mean(dec)))

            #pyplot.plot(ra[0,:], dec[0,:])
            #pyplot.show()
            wcs, _, _ = Mapping.DefineWCS(naxis, cdelt, crval)
            pix = Mapping.ang2pixWCS(wcs, dec[0, :], ra[0, :]).astype('int')
            print(tod[0, 0, 0, :].shape, pix.shape)
            nhorns = tod.shape[0]
            nsidebands = tod.shape[1]
            nchans = tod.shape[2]
            m = np.zeros((nhorns, nsidebands, nchans, naxis[0] * naxis[1]))

            # for i in range(nhorns):
            #    pix = Mapping.ang2pixWCS(wcs, dec[i,:], ra[i,:]).astype('int')#

            #    for j in range(nsidebands):
            #        for k in range(nchans):
            #            print(tod.shape, pix.shape, obs.shape)
            #            m[i,j,k,:],a0 = Mapping.Destripe(tod[i,j,k,:], pix, obs, int(5/0.02), int(naxis[0]*naxis[1]))
Example #24
def FitTOD(tod, ra, dec, obs, clon, clat, prefix='', destripe=False):
    # Beam solid angle taken from James' report on wiki Optics
    nubeam = np.array([26., 33., 40.])
    srbeam = np.array([2.1842e-6, 1.6771e-6, 1.4828e-6])
    pmdl = interp1d(nubeam, srbeam) # straight interpolation

    nHorns = tod.shape[0]
    nSidebands = tod.shape[1]
    nChans = tod.shape[2]
    nSamps = tod.shape[3]
    nParams = 5


    wcs, xr, yr = Mapping.DefineWCS(naxis, cdelt, crval)

    # Crossing indices
    crossings = np.zeros(nHorns)


    # Calculate RMS from adjacent pairs
    splitTOD = (tod[:,:,:,:(nSamps//2) * 2:2] - tod[:,:,:,1:(nSamps//2)*2:2])
    rms = np.std(splitTOD,axis=3)/np.sqrt(2)

    t = np.arange(nSamps)
    P1 = np.zeros((nHorns, nSidebands, nChans, nParams+1))
    errors = np.zeros((nHorns, nSidebands, nChans, nParams))

    fig = pyplot.figure(figsize=(16,16))
    # Rotate the ra/dec
    width = 4.
    pixwidth = 2./60.
    nbins = int(width/pixwidth)
    xygrid = [np.linspace(-width/2, width/2,nbins+1), np.linspace(-width/2, width/2.,nbins+1)]
    xgrid, ygrid = np.meshgrid(np.linspace(-width/2, width/2,nbins)+pixwidth/2., np.linspace(-width/2, width/2.,nbins) + pixwidth/2.)
    for i in  range( nHorns):
        x, y = Pointing.Rotate(ra[i,:], dec[i,:], clon, clat, 0)
        
        # Bin the map up, apply a filter to remove spikes, to give first guess at pixel centre.
        hmap = np.histogram2d(y,x, xygrid)[0]
        todTemp = np.median(np.median(tod[i,:,:,:],axis=0),axis=0)
        rmsTemp = np.std(todTemp[1:todTemp.size//2 *2:2] - todTemp[:todTemp.size//2 * 2:2])/np.sqrt(2)
        smap = np.histogram2d(y,x, xygrid, weights = todTemp)[0]
        xpix = (x+width/2)//pixwidth 
        ypix = (y+width/2)//pixwidth
        #pyplot.plot(xpix, ypix, 'o')
        #pyplot.figure()
        #pyplot.plot(x, y, 'o')
        #pyplot.show()

        if destripe:
            pixels = (ypix + nbins*xpix).astype(int)
            offset = 300
            gd = (pixels > 0) & (pixels < nbins*nbins-1)
            m, offsets = Mapping.Destripe(todTemp[gd], pixels[gd], obs[gd], offset, nbins*nbins)
            m = np.reshape(m, (nbins, nbins)).T 
            #m = smap/hmap
        else:
            #h, wx, wy = np.histogram2d(x,y, (xygrid[0], xygrid[1]))
            #s, wx, wy = np.histogram2d(x,y, (xygrid[0], xygrid[1]), weights=todTemp)
            #m = s/h
            offsets = 0
            m, hits = Mapping.MakeMapSimple(todTemp, x, y, wcs)
            m = m/hits

        m -= np.nanmedian(m)
        #m /= rmsTemp
        m[np.isnan(m)] = 0.

        #pyplot.plot(ra[i,:], todTemp-offsets,'.')
        #pyplot.figure()
        #pyplot.plot(dec[i,:], todTemp-offsets,'.')
        #pyplot.show()


        r = np.sqrt((xgrid)**2 + (ygrid)**2)   
        
        d1 = ImagePeaks(m, r, 4)
        # if len(coords1) > 0:
        #     ximax, yimax = coords1[0]
        # else:
        #     m = median_filter(m, size=(2,2))
        # Remove background?
        xgrid2,ygrid2 = np.meshgrid(np.linspace(-d1.shape[0]/2, d1.shape[0]/2, d1.shape[0]), np.linspace(-d1.shape[1]/2, d1.shape[1]/2, d1.shape[1]))
        background = np.sqrt((xgrid2 - 0)**2 + (ygrid2 - 0)**2) > 25
        print(d1.shape, xgrid.shape)
        d1[background] = 0
        
        ximax, yimax = np.unravel_index(np.argmax(d1), m.shape)
        print(ximax, yimax, np.max(d1))
        #pyplot.imshow(m)
        #pyplot.plot(yimax, ximax,'ow')
        #pyplot.figure()
        #pyplot.imshow(d1)
        #pyplot.plot(yimax, ximax,'ow')
        #pyplot.show()

        # +/- 180
        x[x > 180] -= 360

        #pyplot.plot(x,y)
        #pyplot.figure()
        #pyplot.plot(az[i,:], el[i,:])
        #pyplot.show()

        #xbins = np.linspace(-4, 4, 60*8+1)
        #ybins = np.linspace(-4, 4, 60*8+1)

        # Just select the near data
        y0,x0 = -xr[ximax,yimax], yr[ximax,yimax]
        
        print(x0, y0)
        
        #print(x0, y0)
        #pyplot.figure(figsize=(6,6))
        #pyplot.plot(yr.flatten(),m.flatten(),'-')
        #pyplot.axvline(y0,color='r')
        #pyplot.show()


        background = np.sqrt((x - x0)**2 + (y - y0)**2) > 30./60.
        
        

        #x0, y0 = 0, 0
        #pyplot.plot(x,tod[0,0,0,:])
        #pyplot.axvline(x0)
        #pyplot.axvline(x[np.argmax(tod[0,0,0,:])],color='r')
        #pyplot.figure()
        #pyplot.plot(tod[0,0,0,:])
        #pyplot.show()
        r = np.sqrt((x-x0)**2 + (y-y0)**2)
        close = (r < 12.5/60.)
        near  = (r < 25./60.) & (r > 15/60.)
        far   = (r > 15./60.)
        fitselect = (r < 20./60.)
        time = np.arange(tod.shape[-1])
        #pyplot.plot(time[:], tod[0,0,0,:])
        #pyplot.plot(time[fitselect], tod[0,0,0,fitselect])
        #pyplot.show()
        #extent = [-naxis[0]/2. * cdelt[0], naxis[0]/2. * cdelt[0], 
        #          -naxis[1]/2. * cdelt[1], naxis[1]/2. * cdelt[1]]

        #pyplot.figure(figsize=(6,6))
        
        #m2 = m*0
        #r2 = np.sqrt((xr-x0)**2 + (yr-y0)**2)
        
        #m2 = (r2 < 10./60.)
        #pyplot.imshow(m.T,extent=extent,origin='lower')
        #pyplot.scatter(x0,y0, marker='o',color='r')
        #pyplot.figure()
        #pyplot.imshow(m2.T,extent=extent,origin='lower')
        #pyplot.scatter(x0,y0, marker='o',color='r')
        #pyplot.show()
        

        #pyplot.plot(time[:],todTemp[:],'.')
        #pyplot.plot(time[fitselect],todTemp[fitselect],'.')
        #pyplot.show()

        plotselect = (r < 120./60.)
        for j in range(nSidebands):
            
            for k in range(nChans):
                
                rmdl = np.poly1d(np.polyfit(time[background], tod[i,j,k,background]-offsets,11))
                todBackground =RemoveBackground(tod[i,j,k,:], rms[i,j,k], close, sampleRate=50, cutoff=1.)
                
                if destripe:
                    m, offsets = Mapping.Destripe(tod[i,j,k,gd], pixels[gd], obs[gd], offset, nbins*nbins)
                    m = np.reshape(m, (nbins,nbins)).T
                else:
                    m, hits = Mapping.MakeMapSimple(tod[i,j,k,:], x, y, wcs)
                    m = m/hits


                #tod[i,j,k,:] = tod[i,j,k,:] -offsets #-(rmdl(ra[i,:])+dmdl(dec[i,:]))
                tod[i,j,k,:] -= todBackground#np.median(tod[i,j,k,:])
                fitdata = tod[i,j,k,fitselect]
                fitra = x[fitselect]
                fitdec= y[fitselect]

                amax = np.argmax(fitdata)

                # Initial guess: amplitude, width (4 arcmin FWHM converted to
                # sigma via 2.355), centre x0/y0, and baseline level.
                P0 = [np.max(fitdata) - np.median(fitdata),
                      4./60./2.355,
                      x0,
                      y0,
                      np.median(fitdata)]


                # Fit the Gaussian model with an MCMC ensemble sampler
                ndim, nwalkers = len(P0), 100
                pos = [np.array(P0) + 1e-4*np.random.randn(ndim) for iwalker in range(nwalkers)]
                sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprobNoGrad, args=(fitra, fitdec, fitdata, rms[i,j,k]))
                sampler.run_mcmc(pos, 1200)
                # Discard the first 500 steps as burn-in and thin by a factor of 3
                samples = sampler.chain[:, 500:sampler.chain.shape[1]:3, :].reshape((-1, ndim))

                # Take the posterior mean as the best-fit parameter vector
                P1[i,j,k,:nParams] = np.mean(samples, axis=0)
                print(P1[i,j,k,:nParams])
                print(P0)
                print(x0, y0)

                # Null model: identical parameters but zero amplitude
                P2 = P1[i,j,k,:nParams]*1.
                P2[0] = 0.

                ntod = Gauss2dNoGrad(P1[i,j,k,:nParams], x, y, 0,0)
                nulltod = Gauss2dNoGrad(P2, x, y, 0.,0.)
                otod = tod[i,j,k,:]
                omaps, hits = Mapping.MakeMapSimple(otod, x, y, wcs)
                nmaps, hits = Mapping.MakeMapSimple(ntod, x, y, wcs)
                nulls, hits = Mapping.MakeMapSimple(nulltod, x, y, wcs)


                xgrid, ygrid = np.meshgrid(np.arange(naxis[0]), np.arange(naxis[1]))
                xgrid = (xgrid-naxis[0]/2.)
                ygrid = (ygrid-naxis[1]/2.)
                rgrid = np.sqrt(xgrid**2 + ygrid**2)
                getchi = (rgrid < 5)
                nearChi = (rgrid > 7.5) & (rgrid < 15)

                # Map-space chi-squared of the fit residual and of the null model
                maprms = np.nanstd((nmaps[nearChi]-omaps[nearChi])/hits[nearChi])
                rmap = (omaps[getchi]-nmaps[getchi])/hits[getchi]
                mapChi = np.nansum((rmap-np.mean(rmap))**2/maprms**2)/(nParams-1.)
                mapOrig = np.nansum(((omaps[getchi]-nulls[getchi])/hits[getchi])**2/maprms**2)/(nParams-1.)

                
                origChi = np.sum((tod[i,j,k,plotselect] -  Gauss2dNoGrad(P2, x[plotselect], y[plotselect], 0.,0.))**2/rms[i,j,k]**2)/(nParams-1.)
                reducedChi = np.sum((tod[i,j,k,plotselect] -  Gauss2dNoGrad(P1[i,j,k,:nParams], x[plotselect], y[plotselect], 0.,0.))**2/rms[i,j,k]**2)/(nParams-1.)
                #print('CHI2', origChi, reducedChi, mapChi, mapOrig, reducedChi/origChi)
                P1[i,j,k,nParams] = rms[i,j,k]


                if (k >= 0):  # always true: plot diagnostics for every channel

                    ax = fig.add_subplot(3,1,1)
                    todPlot = tod[i,j,k,plotselect]
                    todPlot -= np.median(todPlot)
                    pyplot.plot(todPlot-Gauss2dNoGrad(P1[i,j,k,:nParams], x[plotselect], y[plotselect], 0,0))
                    pyplot.title('Horn {}, Sideband {}, Avg. Channel {} \n {}'.format(i,j,k, prefix))
                    pyplot.xlabel('Sample')
                    pyplot.ylabel('Detector Units')
                    pyplot.text(0.9,0.9,r'$rms$ = '+'{:.3f}'.format(rms[i,j,k]), ha='right', transform=ax.transAxes) 
                    ax = fig.add_subplot(3,1,2)
                    pyplot.plot(todPlot)

                    # Overlay the best-fit and null (offset-only) models
                    pyplot.plot(Gauss2dNoGrad(P1[i,j,k,:nParams], x[plotselect], y[plotselect], 0,0))
                    pyplot.plot(Gauss2dNoGrad(P2, x[plotselect], y[plotselect], 0,0))

                    pyplot.xlabel('Sample')
                    pyplot.ylabel('Detector Units')
                    pyplot.text(0.9,0.9,r'$\chi^2$ = '+'{:.3f}'.format(mapChi), ha='right', transform=ax.transAxes) 
                    
                    
                    extent = [-naxis[0]/2. * cdelt[0], naxis[0]/2. * cdelt[0], 
                              -naxis[1]/2. * cdelt[1], naxis[1]/2. * cdelt[1]]
                    ax = fig.add_subplot(3,3,7)
                    ax.imshow(m, aspect='auto', extent=extent)
                    ax.scatter(P1[i,j,k,2], P1[i,j,k,3], marker='o',color='r')
                    ax = fig.add_subplot(3,3,8)
                    ax.imshow(nmaps/hits, extent=extent)
                    ax.scatter(P1[i,j,k,2], P1[i,j,k,3], marker='o',color='r')

                    ax = fig.add_subplot(3,3,9)
                    ax.imshow(nmaps/hits-m, extent=extent)
                    ax.scatter(P1[i,j,k,2], P1[i,j,k,3], marker='o',color='r')

                    pyplot.savefig('TODResidPlots/TODResidual_{}_H{}_S{}_C{}.png'.format(prefix, i,j,k), bbox_inches='tight')
                    pyplot.clf()
        cross = np.argmax(Gauss2dNoGrad(np.median(P1[i,0,:,:nParams],axis=0), x, y, 0,0))
        print(cross, nHorns)
        crossings[i] = cross
    return P1, errors, crossings
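
Note: Gauss2dNoGrad and lnprobNoGrad are referenced above but not defined in this
excerpt. A minimal sketch consistent with the five-element P0 used in the fit
([amplitude, sigma, x0, y0, offset]) might look like the following; the actual
definitions may differ.

import numpy as np

def Gauss2dNoGrad(P, x, y, ra0, dec0):
    # P = [amplitude, sigma, x0, y0, offset]; ra0/dec0 are unused in this sketch
    A, sigma, x0, y0, offset = P
    r2 = (x - x0)**2 + (y - y0)**2
    return A*np.exp(-0.5*r2/sigma**2) + offset

def lnprobNoGrad(P, x, y, data, rms):
    # Flat prior rejecting non-physical widths, Gaussian likelihood otherwise
    if P[1] <= 0:
        return -np.inf
    resid = data - Gauss2dNoGrad(P, x, y, 0, 0)
    return -0.5*np.sum(resid**2)/rms**2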
Example No. 25
0
import Mapping, time

# Define resolution unit factors (metres)
m = 1.0
dm = 0.1
cm = 0.01
mm = 0.001

if __name__ == '__main__':
    # time.sleep(10)
    # Mapping.collect_map(repeat=400)  # save .npy map
    Mapping.convert_map(
        resulution=cm)  # convert all .npy maps to .png at 1 cm resolution
Example No. 26
0
            'onehot', 'onehot_shared', 'phonetic', 'onehot_and_phonetic'
    ]:
        representation = {}
        for lang in lang_pair:
            representation[lang] = args.representation
    else:
        representation = dict(
            [x.split(':') for x in args.representation.split(',')])

    ## Print Representation and Mappings
    print 'Representation'
    print representation

    ### load the mapping
    mapping = {}
    shared_mapping_obj = Mapping.get_mapping_instance(shared_mapping_class)

    for lang in representation.keys():
        if representation[lang] in ['phonetic', 'onehot_and_phonetic']:
            mapping[lang] = shared_mapping_obj
        elif representation[lang] == 'onehot_shared':
            mapping[lang] = shared_mapping_obj
        elif representation[lang] == 'onehot':
            mapping[lang] = Mapping.CharacterMapping()

        with open(mapping_dir + '/' + 'mapping_' + lang + '.json',
                  'r') as mapping_file:
            mapping[lang].load_mapping(mapping_file)

    ## Print Representation and Mappings
    print 'Mapping'
Example No. 27
0
struc = Structure(options["-f"].value,strict=options["-strict"].value)


##### C. Set the mapping dictionary


# Convert force field tags to lower case 
# Default is backmapping from MARTINI to GROMOS53A6
# If to_ff == martini, default from_ff = gromos
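# Note: "a and b or c" is the pre-Python-2.5 conditional idiom, equivalent to
# "b if a else c" as long as b is truthy (a non-empty string here).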
to_ff = options["-to"] and options["-to"].value.lower() or "gromos"
if to_ff == "martini" and not options["-from"]:
    from_ff = "gromos"
else:
    from_ff     = options["-from"] and options["-from"].value.lower() or "martini"
mapping     = Mapping.get(source=from_ff,target=to_ff)
backmapping = levels[from_ff] > levels[to_ff]
reslist     = mapping.keys()


##### D. Iterate over atoms to write out, based on residue names


# Copy the residue list from the target topology
# This gives a list we can pop from, while keeping
# the original.
# The solvent residues are skipped.
topresidues = None
if top:
    topresidues = [i for i in top.residues]
    if options["-atomlist"]:
Example No. 28
0
from PointCloud import *

# Modules used below; the full script presumably imports these as well
import signal
import sys

import cv2
import numpy as np
import rospy


def signal_handler(sig, frame):
    # Renamed the first parameter so it does not shadow the signal module
    print('Ctrl-C pressed')
    sys.exit(0)


signal.signal(signal.SIGINT, signal_handler)
print('Ctrl-C to close program')

# initializations
model = Camera()  # camera model
ds_cam = DatasetCamera()
tr = Tracking()  # tracking module
mp = Mapping()  # mapping module
pc = PointCloud()  # point cloud
rospy.init_node('cloud_stream', anonymous=True)

# video capture object
# cap = cv2.VideoCapture(0)

for im_id in xrange(108):

    im = cv2.imread('./dataset/image_00/data/' + str(im_id).zfill(10) + '.png',
                    0)

    if im_id == 0:
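        # NOTE: 'detector' is not defined in this excerpt; the full script
        # presumably constructs a feature detector (e.g. with OpenCV) earlier.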
        tr.kp_old = detector.detect(im)
        tr.kp_old = np.array([x.pt for x in tr.kp_old], dtype=np.float32)
        tr.addView(im)
Example No. 29
0
def convert_address_to_latlng(address_string):
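    # Delegate geocoding of the address string to Mapping.address_to_geo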
    return Mapping.address_to_geo(address_string)
Example No. 30
0
def raw_data_to_keys(extracted_data):
    # Map each raw datum to its key via Mapping.raw_to_key and join into one string
    return ''.join(map(Mapping.Mapping().raw_to_key, extracted_data))
Example No. 31
0
def closeGripper(self):
    # Method excerpted from a class; 's' appears to be a module-level state object.
    # Convert the grip setting to a servo angle and command the gripper servo.
    servoAngle = Mapping.getGripAngle(s.closeGripper)
    Servo.gripperServo(s.gripperPin, int(servoAngle))
    time.sleep(1)
    s.gripAngle = s.closeGripper
Example No. 32
0
def run_train(args):
    """
     Training script
    """

    ## check for required parameters
    if args.lang is None:
        print 'ERROR: --lang has to be set'
        sys.exit(1)

    if args.data_dir is None and args.mode == 'train':
        print 'ERROR: --data_dir has to be set'
        sys.exit(1)

    if args.output_dir is None and args.mode == 'test':
        print 'ERROR: --output_dir has to be set'
        sys.exit(1)

    # Create output folders if required
    models_dir = args.output_dir + '/models/'
    mappings_dir = args.output_dir + '/mappings/'
    log_dir = args.output_dir + '/log/'

    for folder in [models_dir, mappings_dir, log_dir]:
        if not os.path.exists(folder):
            os.makedirs(folder)

    #######################################
    # Reading data and creating mappings  #
    #######################################

    # Creating mapping object to store char-id mappings
    mapping = Mapping.get_mapping_instance(args.mapping_class)

    if args.use_mapping is not None:
        print 'Using existing vocabulary'
        with open(args.use_mapping, 'r') as mapping_json_file:
            mapping.load_mapping(mapping_json_file)

    print 'Start Reading Data'
    train_data = MonoDataReader.MonoDataReader(
        args.lang, args.data_dir + '/train.' + args.lang, mapping,
        args.max_seq_length)

    ## complete vocabulary creation
    if args.use_mapping is None:
        mapping.finalize_vocab()

    with open(mappings_dir + '/mapping_{}.json'.format(args.lang),
              'w') as mapping_json_file:
        mapping.save_mapping(mapping_json_file)

    ## Print Representation and Mappings
    print 'Mapping'
    print mapping

    print 'Vocabulary Statistics'
    print '{}: {}'.format(args.lang, mapping.get_vocab_size())
    sys.stdout.flush()

    # Reading Validation data
    valid_data = MonoDataReader.MonoDataReader(
        args.lang, args.data_dir + '/tun.' + args.lang, mapping,
        args.max_seq_length)
    ## Reading test data
    test_data = MonoDataReader.MonoDataReader(
        args.lang, args.data_dir + '/test.' + args.lang, mapping,
        args.max_seq_length)

    print 'Finished Reading Data'

    ###################################################################
    #    Interacting with model and creating computation graph        #
    ###################################################################

    # Creating Model object
    model = LanguageModel(args.lang, mapping, args.representation,
                          args.max_seq_length, args.embedding_size,
                          args.rnn_size)

    ## Creating placeholder for sequences, masks and lengths and dropout keep probability
    pl_batch_sequences = tf.placeholder(shape=[None, args.max_seq_length],
                                        dtype=tf.int32)
    pl_batch_sequence_lengths = tf.placeholder(shape=[None], dtype=tf.float32)

    optimizer_op = model.get_optimizer(pl_batch_sequences,
                                       pl_batch_sequence_lengths,
                                       args.learning_rate,
                                       args.dropout_keep_prob)

    tf.get_variable_scope().reuse_variables()

    loss_op = model.average_loss(pl_batch_sequences, pl_batch_sequence_lengths,
                                 1.0)

    print "Done with creating graph. Starting session"
    sys.stdout.flush()

    #Saving model
    saver = tf.train.Saver(max_to_keep=0)
    final_saver = tf.train.Saver()

    #Start Session
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
    sess.run(tf.initialize_all_variables())
    if (args.start_from is not None):
        saver.restore(
            sess, '{}/models/model-{}'.format(args.output_dir,
                                              args.start_from))
        completed_epochs = args.start_from

    summary_writer = tf.train.SummaryWriter(log_dir, sess.graph)

    print "Session started"

    # Fractional epoch: stores what fraction of each dataset is used till now after last completed epoch.
    fractional_epochs = 0.0
    if args.start_from is None:
        completed_epochs = 0

    steps = 0
    prev_steps = 0
    validation_losses = []

    epoch_train_time = 0.0
    epoch_train_loss = 0.0

    start_time = time.time()

    # Whether to continue or not
    cont = True

    print 'Starting training ...'
    while cont:
        # Select the dataset whose least fraction has been used for training in the current epoch

        ### TRAIN
        update_start_time = time.time()

        sequences, sequence_masks, sequence_lengths = \
                train_data.get_next_batch(args.batch_size)

        _, step_loss = sess.run(optimizer_op,
                                feed_dict={
                                    pl_batch_sequences: sequences,
                                    pl_batch_sequence_lengths: sequence_lengths
                                })

        fractional_epochs += float(len(sequences)) / train_data.num_words
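        # (fractional_epochs tracks the share of the training set consumed since
        #  the last completed epoch; reaching 1.0 below ends the epoch.)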

        epoch_train_loss += step_loss

        update_end_time = time.time()
        epoch_train_time += (update_end_time - update_start_time)

        # One more batch is processed
        steps += 1
        # If all the data has been used, the training epoch is complete
        if (fractional_epochs >= 1.0):

            # update epoch number
            completed_epochs += 1

            ### VALIDATION LOSS
            # Find validation loss
            valid_start_time = time.time()
            validation_loss = get_average_loss(valid_data, loss_op,
                                               pl_batch_sequences,
                                               pl_batch_sequence_lengths, sess)
            validation_losses.append(validation_loss)
            valid_end_time = time.time()
            epoch_valid_time = (valid_end_time - valid_start_time)

            ## TEST LOSS
            test_start_time = time.time()
            test_loss = get_average_loss(test_data, loss_op,
                                         pl_batch_sequences,
                                         pl_batch_sequence_lengths, sess)
            test_end_time = time.time()
            epoch_test_time = (test_end_time - test_start_time)

            print "Epochs Completed : " + str(completed_epochs).zfill(
                3) + "\t Training loss: " + str(epoch_train_loss /
                                                (steps - prev_steps))
            print "Epochs Completed : " + str(completed_epochs).zfill(
                3) + "\t Validation loss: " + str(validation_loss)
            print "Epochs Completed : " + str(completed_epochs).zfill(
                3) + "\t Test loss: " + str(test_loss)

            ## Early stopping (disabled): if validation loss has increased for the
            ## last 3 epochs, restore the model from 4 epochs ago and stop training.
            #if (completed_epochs >= 4 and len(validation_losses) >= 4 and
            #        all([i > j for (i, j) in zip(validation_losses[-3:], validation_losses[-4:-1])])):
            #    completed_epochs -= 3
            #    saver.restore(sess, models_dir + 'my_model-' + str(completed_epochs))
            #    cont = False

            # If max_epochs are done
            if (completed_epochs >= args.max_epochs):
                cont = False

            saver.save(sess,
                       models_dir + 'model',
                       global_step=completed_epochs)

            print "Epochs Completed : "+str(completed_epochs).zfill(3)+ \
                    "\t Time (hh:mm:ss)::: train> {} valid> {} test> {}".format(
                            utilities.formatted_timeinterval(epoch_train_time),
                            utilities.formatted_timeinterval(epoch_valid_time),
                            utilities.formatted_timeinterval(epoch_test_time),
                            )

            ## update epoch variables
            prev_steps = steps
            fractional_epochs = 0.0
            epoch_train_time = 0.0
            epoch_train_loss = 0.0

            print "Epochs Completed : "+str(completed_epochs).zfill(3)+ \
                    "\t Number of training steps: {}".format(steps)

            print "Epochs Completed : "+str(completed_epochs).zfill(3)+ \
                    "\t Time of completion: {}".format(time.asctime())

            sys.stdout.flush()

    # save final model
    final_saver.save(
        sess, args.output_dir + '/final_model_epochs_' + str(completed_epochs))

    print 'End training'

    print 'Final number of training steps: {}'.format(steps)

    #### Print total training time
    end_time = time.time()
    print 'Total Time for Training (hh:mm:ss) : {}'.format(
        utilities.formatted_timeinterval(end_time - start_time))
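
Note: for context, a minimal sketch of the command-line interface this script
appears to assume, with flag names inferred from the args.* attributes used
above; the defaults shown are purely illustrative.

import argparse

def build_parser():
    parser = argparse.ArgumentParser(description='Train a character-level language model')
    parser.add_argument('--lang', help='language to train on (required)')
    parser.add_argument('--mode', default='train', choices=['train', 'test'])
    parser.add_argument('--data_dir', help='directory containing train/tun/test files')
    parser.add_argument('--output_dir', help='directory for models, mappings and logs')
    parser.add_argument('--mapping_class', help='name of the Mapping class to use')
    parser.add_argument('--use_mapping', help='path to an existing mapping JSON file')
    parser.add_argument('--representation', help='input representation, e.g. onehot')
    parser.add_argument('--max_seq_length', type=int, default=50)
    parser.add_argument('--embedding_size', type=int, default=256)
    parser.add_argument('--rnn_size', type=int, default=512)
    parser.add_argument('--learning_rate', type=float, default=0.001)
    parser.add_argument('--dropout_keep_prob', type=float, default=0.5)
    parser.add_argument('--batch_size', type=int, default=32)
    parser.add_argument('--max_epochs', type=int, default=30)
    parser.add_argument('--start_from', type=int, help='epoch checkpoint to resume from')
    return parser

# Usage: run_train(build_parser().parse_args())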
Example No. 33
0
def FitTOD(tod,
           ra,
           dec,
           clon,
           clat,
           cpang,
           prefix='',
           normalize=True,
           plotDir=None):
    """
    args:
    tod   - 
    ra    - 
    dec   - 
    clon  -
    clat  -
    cpang -

    kwargs:
    prefix      -
    destripe    -
    normalize   -
    justOffsets -
    """

    # Define the pixel grid
    # Pixel coordinates on sky
    wcs, xr, yr = Mapping.DefineWCS(naxis, cdelt, crval)
    r = np.sqrt((xr)**2 + (yr)**2)

    # Pixel coordinates in image
    xpix, ypix = np.meshgrid(np.arange(xr.shape[0]),
                             np.arange(yr.shape[1]),
                             indexing='ij')

    # Calculate RMS from adjacent pairs
    rms = CalcRMS(tod)

    # Rotate the RA/DEC to the source centre
    x, y = Pointing.Rotate(ra, dec, clon, clat, -cpang)

    r = np.sqrt((x)**2 + (y)**2)
    close = (r < 3.)  # Check the source is within 3 degrees of the field centre
    if np.sum((r < 6. / 60.)) < 10:
        print('Source not observed')
        return badval

    # Filter background or at least subtract a mean level
    try:
        todBackground = RemoveBackground(tod,
                                         rms,
                                         x,
                                         y,
                                         sampleRate=50,
                                         cutoff=1.)
        tod -= todBackground
    except (ValueError, IndexError):
        tod -= np.nanmedian(tod)

    # Create map of data centred on 0,0
    ms, hits = Mapping.MakeMapSimple(tod, x, y, wcs)
    m = ms / hits

    # Calculate the pair-subtracted TOD to create a residual map
    residTod = tod[:tod.size // 2 * 2:2] - tod[1:tod.size // 2 * 2:2]
    residmap, rh = Mapping.MakeMapSimple(residTod, x[:(tod.size // 2) * 2:2],
                                         y[:(tod.size // 2) * 2:2], wcs)
    residmap = residmap / rh
    mapNoise = np.nanstd(residmap) / np.sqrt(2)

    m -= np.nanmedian(m)
    m[np.isnan(m)] = 0.

    # Get an estimate of the peak location
    x0, y0, xpix0, ypix0 = ImagePeaks(m, xr, yr, mapNoise)

    if isinstance(x0, type(None)):
        print('No peak found')
        return badval

    # Select the data near the updated peak location
    # (the selection radii below are hard-coded, in degrees)
    r = np.sqrt((x - x0)**2 + (y - y0)**2)
    close = (r < 12.5 / 60.)
    near = (r < 25. / 60.) & (r > 15 / 60.)
    far = (r > 30. / 60.)
    fitselect = (r < 10. / 60.) & (np.isnan(tod) == False)
    plotselect = (r < 45. / 60.)

    if np.sum(fitselect) < 20:
        return badval

    fitdata = tod[fitselect]
    fitra = x[fitselect]
    fitdec = y[fitselect]

    # Initial guesses for fit
    P0 = [
        np.max(fitdata) - np.median(fitdata), 4. / 60. / 2.355,
        4. / 60. / 2.355, x0, y0,
        np.median(fitdata)
    ]

    # Run mcmc fit:
    ndim, nwalkers = len(P0), 100
    pos = [
        np.array(P0) + 1e-4 * np.random.randn(ndim)
        for iwalker in range(nwalkers)
    ]
    sampler = emcee.EnsembleSampler(nwalkers,
                                    ndim,
                                    lnprob,
                                    args=(fitra, fitdec, fitdata, rms))
    sampler.run_mcmc(pos, 1200)
    samples = sampler.chain[:, 500:sampler.chain.shape[1]:3, :].reshape(
        (-1, ndim))
    Pest = np.mean(samples, axis=0)
    Pstd = np.std(samples, axis=0)

    chi2 = np.sum((fitdata - Gauss2d2FWHM(Pest, fitra, fitdec, 0, 0))**2 /
                  rms**2) / (fitdata.size - len(Pest))
    if not isinstance(plotDir, type(None)):
        pyplot.plot(fitdata, label='data')
        pyplot.plot(Gauss2d2FWHM(Pest, fitra, fitdec, 0, 0), label='fit')
        pyplot.legend(loc='upper right')
        pyplot.ylabel('T (K)')
        pyplot.xlabel('Sample')
        pyplot.text(0.05,
                    0.9,
                    r'$\chi^2$=' + '{:.2f}'.format(chi2),
                    transform=pyplot.gca().transAxes)
        pyplot.title(' {}'.format(prefix))
        pyplot.savefig('{}/PeakFits_{}.png'.format(plotDir, prefix),
                       bbox_inches='tight')
        pyplot.clf()

    # Normalise the map by the fitted peak amplitude
    if normalize:
        ms /= Pest[0]
    # Output fits + sample of peak crossing
    cross = np.argmax(Gauss2d2FWHM(Pest, x, y, 0, 0))

    return Pest, Pstd, cross, ms, hits, Gauss2d2FWHM(
        [1., Pest[1], Pest[2], Pest[3], Pest[4], 0], xr, yr, 0, 0) * outweight
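
Note: as in the earlier example, Gauss2d2FWHM and lnprob are not defined in
this excerpt. A minimal sketch consistent with the six-element P0 above
([amplitude, sigma_x, sigma_y, x0, y0, offset]); the actual definitions may
differ.

import numpy as np

def Gauss2d2FWHM(P, x, y, ra0, dec0):
    # P = [amplitude, sigma_x, sigma_y, x0, y0, offset]; ra0/dec0 unused here
    A, sx, sy, x0, y0, offset = P
    return A*np.exp(-0.5*((x - x0)**2/sx**2 + (y - y0)**2/sy**2)) + offset

def lnprob(P, x, y, data, rms):
    # Flat prior rejecting non-physical widths, Gaussian likelihood otherwise
    if P[1] <= 0 or P[2] <= 0:
        return -np.inf
    resid = data - Gauss2d2FWHM(P, x, y, 0, 0)
    return -0.5*np.sum(resid**2/rms**2)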
Example No. 34
0
import Android
import Mapping
import Manipulator
import time
xStart = 6
yStart = 7
xB = 0
yB = 0
myOrientation = -90

while True:
    # Wi-Fi: receive the target coordinates and ball colour code from the app
    [xB, yB, C] = Android.getWifiData(xB, yB)

    # Mapping: drive from the start cell to the target
    myOrientation = Mapping.drive(xStart, yStart, yB, xB, myOrientation)
    time.sleep(30)

    # Manipulator
    # Manipulator.grab(C)  # 2 - yellow ball, 1 - blue ball

    # Mapping: drive back from the target to the start
    myOrientation = Mapping.drive(yB, xB, xStart, yStart, myOrientation)
Example No. 35
0
                crval = [0, 0]
            else:
                crval = [r0, d0]
            cdelt = np.array(json.loads(Parameters.get('Mapping','cdelt')))/60.

            minRa  = np.min(ra)
            maxRa  = np.max(ra)
            minDec = np.min(dec)
            maxDec = np.max(dec)

            if (crval[0] < minRa) | (crval[0] > maxRa) | (crval[1] < minDec) | (crval[1] > maxDec):
                print('WARNING: MAP CENTRE DOES NOT MATCH TELESCOPE POINTING CENTRE. CHECK COORDINATES')
                print('MEAN RA: {:.2f}, MEAN DEC: {:.2f}'.format(np.mean(ra), np.mean(dec)))
            

            wcs,_,_ = Mapping.DefineWCS(naxis, cdelt, crval)
            maps, hits = Mapping.MakeMaps(tod, ra, dec, wcs)
            dataout['hits']  = hits
            dataout['maps']  = maps
            dataout['naxis'] = np.array(naxis)
            dataout['cdelt'] = np.array(cdelt)
            dataout['crval'] = np.array(crval)


        sbStr = ''.join(str(e) for e in sidebands)
        hoStr = ''.join(str(e) for e in pixels)
        FileTools.WriteH5Py('{}/{}_{}_Horns{}_Sidebands{}.h5'.format(Parameters.get('Inputs', 'outputDir'), 
                                                                     Parameters.get('Inputs', 'outputname'),
                                                                     prefix,
                                                                     hoStr,
                                                                     sbStr), dataout)
Example No. 36
0
import Mapping


# Define the start and finish cells and the starting orientation
xA = 6  # x is the column index, y is the row index
yA = 7
xB = 1
yB = 7
myOrientation = -90
Mapping.drive(xA, yA, xB, yB, myOrientation)