Example 1
def main():
    manager = Manager()

    # Range Sensor Distance
    distance = manager.list([0, 0, 0, 0, 0, 0])

    # DataBase
    db = manager.list()

    # Command from Keyboard
    keyboard_cmd = manager.dict()
    keyboard_cmd['vld'] = 0
    keyboard_cmd['cmd'] = ''

    # GoBack Ready
    goback_cmd = manager.dict()
    goback_cmd['vld'] = 0

    # Step 1: Initialization
    initialization.initialize()
    # Step 2: Departure
    operations.take_off()
    # Step 3: Start the worker processes
    to_thr = Process(target=timeout, args=(runtime,))
    key_thr = Process(target=keyboard_thread.keyboard_thread_wrapper_function, args=(keyboard_cmd,))
    cmd_thr = Process(target=command_thread.command_thread_wrapper_function, args=(distance, keyboard_cmd, db, goback_cmd))
    rs_thr = Process(target=range_sensor_thread.range_sensor_wrapper_function, args=(distance,))
    gb_thr = Process(target=goback_thread.goback_wrapper_function, args=(goback_cmd,))

    cmd_thr.start()
    rs_thr.start()
    to_thr.start()
    key_thr.start()
    gb_thr.start()

    # Step 4: Wait for the timeout, then stop the workers
    to_thr.join()
    cmd_thr.terminate()
    rs_thr.terminate()
    key_thr.terminate()
    gb_thr.terminate()

    operations.landing()

    time.sleep(1)
    cleanup.cleanup()
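
This snippet relies on scaffolding it does not show. A minimal sketch of the assumed pieces (runtime and timeout are inferred from how they are used; the project modules initialization, operations, and the *_thread modules are not reconstructed here):

import time
from multiprocessing import Process, Manager

runtime = 60  # hypothetical total run time in seconds

def timeout(seconds):
    # The main process joins this worker, so sleeping here acts as a
    # global time limit before the other workers are terminated.
    time.sleep(seconds)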
Example 2
def fit_model(structure, data_matrix, old_root=None, gibbs_steps=200):
    if old_root is None:
        X = data_matrix.sample_latent_values(np.zeros((data_matrix.m, data_matrix.n)), 1.)
        old_root = GaussianNode(X, 'scalar', 1.)
    root = initialization.initialize(data_matrix, old_root, old_root.structure(), structure, num_iter=gibbs_steps)
    model = models.get_model(structure, fixed_noise_variance=data_matrix.fixed_variance())
    models.align(root, model)
    dumb_samplers.sweep(data_matrix, root, num_iter=gibbs_steps)
    dumb_samplers.sweep(data_matrix, root, maximize=True, num_iter=1)  
    return root
Example 3
def main(dev_run: ("Runs all the training with 1 iteration only", 'option',
                   'd'),
         version="0.1"):
    # dev_run is a flag that sets all training iterations to 1, for the
    # purpose of testing the assembly process
    development_run = bool(dev_run)

    VERSION = version
    # converting the resources into JSON files
    from conversion import convert_resources
    convert_resources()
    print("\nResources converted\n")

    # initializing the model
    from initialization import initialize
    initialize()
    print("\nModel initialized\n")

    # training the components; this may naturally take a long time.
    # To train them separately at different times, run the respective scripts.
    from tagger_training import train_tagger
    train_tagger(development_run)
    print("\nTagger trained\n")

    from parser_training import train_parser
    train_parser(development_run)
    print("\nParser trained\n")

    from ner_training import train_ner
    train_ner(development_run)
    print("\nNER component trained\n")

    # assembling the model
    from assembly import assemble_model
    assemble_model(VERSION)
    print("\nModel assembled\n")
Example 4
def main():
    loop = 0
    manager = Manager()
    distance = manager.list([0, 0, 0, 0, 0, 0])
#    distance2 = manager.list([0, 0, 0, 0, 0, 0])
    # Step 1: Initialization
    initialization.initialize()
    # Step 2: Departure
    operations.take_off()
    # Step 3: Start the worker processes
    timeout_thread = Process(target=timeout, args=(runtime,))
    p1 = Process(target=command_thread.command_thread_wrapper_function, args=(distance,))
    p2 = Process(target=range_sensor_thread.range_sensor_wrapper_function, args=(distance,))
    p1.start()
    p2.start()
    timeout_thread.start()
    # Step 4: Wait for the timeout, then stop the workers
    timeout_thread.join()
    p1.terminate()
    p2.terminate()
    time.sleep(1)
    cleanup.cleanup()
Example 5
def main():
    loop = 0
    # Step 1: Initialization
    initialization.initialize()
    # Step 2: Departure
    operations.take_off()
    # distance was not defined in this snippet; six slots match the
    # other examples (one per sensor direction)
    distance = [0] * 6
    #while True :
    while loop < 300:  # time per loop: ~0.15 s
        loop += 1
        # Step 3: Detect range
#        for i in range(0, range_sensor.num_directions):
#            distance[i] = range_sensor.detect_range(i)
        distance[4] = range_sensor.detect_range(4)
        time.sleep(0.05)
        # Step 4: Analyze range
#        if (distance[4] > 180):
#            #operations.hover()
#            operations.move_backward()
#        elif (distance[4] < 180):
#            #operations.hover()
#            operations.move_backward()
#        else:
#            #operations.hover()
#            operations.move_backward()
        # Step 5: Generate next command
        if loop < 50:
            operations.hover()
        elif loop < 100:
            operations.move_backward()
        elif loop < 150:
            operations.hover()
        elif loop < 200:
            operations.move_backward()
        else:
            operations.drop()
    cleanup.cleanup()
Example 6
def pmm(data, k, max_iterations=20, threshold=1e-4, verbose=False):
    """
    Fits a poisson mixture model to the data with k latent classes.

    Args:
        - data: data to assign to different mixtures, shape (num_samples, dim)
        - k: number of latent classes

    Return Values:
        - means: values of means params, shape (k, dim)
        - phis: probs of different classes, shape (k)
        - log_prob: last computed log probability of data
        - w: responsibilities of different classes for each sample, shape (num_samples, k)
    """
    # initialize the mean parameters for each class
    means, assignments, _ = initialization.initialize(data, k, num_runs=15)
    phis = np.zeros(k)
    for assignment in assignments:
        phis[assignment] += 1
    phis /= len(assignments)

    if verbose:
        utils.plot_1d_data_assigments(data, means, assignments)

    # repeatedly run e-step and m-step until convergence
    # initialize some values to reuse or return
    prev_log_prob, log_prob, w = 0, -1, None
    for idx in range(max_iterations):

        # e-step
        w, log_prob = mm.e_step(data, means, phis, density=utils.log_poisson)

        # m-step
        means, phis = m_step(data, k, w)

        # check for convergence
        diff = abs(log_prob - prev_log_prob)
        if diff < threshold:
            break
        else:
            prev_log_prob = log_prob

            if verbose:
                # check how it's going
                utils.plot_1d_data_responsibilities(data, w, means)
                print('log prob: {:.5f}\tchange in log prob: {:.5f}'.format(log_prob, diff))

    return means, phis, log_prob, w
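
The m_step called above is not shown in the snippet. For a Poisson mixture the update has a closed form (class means are responsibility-weighted data averages, class priors are mean responsibilities), so a sketch under that assumption:

import numpy as np

def m_step(data, k, w):
    # w: responsibilities, shape (num_samples, k); data: (num_samples, dim);
    # k is implied by w's width but kept to match the call above
    weights = w.sum(axis=0)                       # effective count per class, shape (k,)
    means = np.dot(w.T, data) / weights[:, None]  # weighted mean per class, shape (k, dim)
    phis = weights / data.shape[0]                # class priors, shape (k,)
    return means, phis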
Example 7

import pickle
import time

# initialize, actions, and follow_spree come from the surrounding
# project and are not shown in this snippet; pickle and time are used
# below but were not imported in the original

def load_cookies(driver):

    cookies = pickle.load(open('cookies.txt', 'rb'))
    driver.delete_all_cookies()
    # have to be on a page before you can add any cookies, any page - does not matter which
    driver.get("https://amazon.com")
    for cookie in cookies:
        driver.add_cookie(cookie)
    print('cookies loaded successfully')


if __name__ == '__main__':

    dr = initialize()
    action = actions()

    #load_cookies(dr)

    dr.get('https://instagram.com')

    time.sleep(50)

    follow_spree(dr)

    #unfollow_spree(dr)

    #save_cookies(dr)

    dr.quit()
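
save_cookies is referenced above (commented out) but not shown; the natural counterpart to load_cookies would be something like:

def save_cookies(driver):
    # Selenium's get_cookies returns a list of cookie dicts that
    # add_cookie accepts back, so a plain pickle round-trip works
    pickle.dump(driver.get_cookies(), open('cookies.txt', 'wb'))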
Example 8
import pyxel
import random
import initialization
from ballclass import Ball
from playerclass import Player
from missileclass import Missile

pyxel.init(256, 256)
initialization.initialize()

counter = 0
player = Player()
balls = []
state = 'GAMEOVER'
hiscore = 0
missiles = []


def update():
    global state
    if state == 'GAMEOVER':
        if pyxel.btn(pyxel.KEY_ENTER):
            start_game()
            state = 'PLAYING'
    else:
        update_playing()


def start_game():
    global counter, balls, player
    counter = 0
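
The snippet cuts off inside start_game and never shows the draw callback or the call that starts the game loop. A pyxel app of this shape is typically launched as follows (draw here is a hypothetical placeholder):

def draw():
    # placeholder; the project's real draw routine is not shown
    pyxel.cls(0)

pyxel.run(update, draw)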
Example 9
# handleOK and debug were only assigned inside their option branches in
# the original; without defaults the calls at the bottom would raise
# NameError when those options are absent
verbose = False
handleOK = False
debug = False
for o, a in opts:
    if o == "-v":
        verbose = True
    elif o in ("-h", "--help"):
        alphaBotUtility.usage()
        sys.exit()
    elif o in ("-k", "--handleOK"):
        handleOK = True
    elif o in ("-d", "--debug"):
        debug = True
    elif o in ("-r", "--report"):
        report.reportDirectory = str(a)
    elif o in ("-e", "--explorer"):
        report = str(a)
        reportExplorer.analyseReport(report)
        sys.exit()
    elif o in ("-l", "--exploreLatest"):
        reportExplorer.analyseReport(None)
        sys.exit()
    else:
        assert False, "unhandled option"

captchaDir = "./captcha"
if not os.path.exists(captchaDir):
    os.makedirs(captchaDir)

initialization.initialize(handleOK=handleOK)

bot.run(handleOK=handleOK, debug=debug)
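
The opts iterated above come from outside the snippet, presumably getopt. A sketch of the assumed setup, with the option string inferred from the branches (-r and -e take arguments):

import getopt
import sys

opts, args = getopt.getopt(sys.argv[1:], "vhkdr:e:l",
                           ["help", "handleOK", "debug", "report=",
                            "explorer=", "exploreLatest"])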
Example 10
    def do_POST(self):
        """
        definiert den Umgang mit POST Requests
        Liest den Body aus - gibt in zum konvertieren weiter

        """
        global dataset_info, net, experiment_id, max_display, dataset, _neighbors
        if self.path == "/nodes":
            print("post /nodes")
            ### POST Request Header ###
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            #self.send_header('Access-Control-Allow-Origin', self.headers['origin'])
            self.end_headers()

            # get body from request
            content_len = int(self.headers['Content-Length'])
            body = self.rfile.read(content_len)

            # convert body to list
            data = json.loads(str(body).decode('utf-8'))  # python 2
            #data = json.loads(str(body, encoding='utf-8'))      # python 3
            # print(data)

            # Katjas code goes here
            reset(experiment_id, dataset=dataset)
            _neighbors = {'positive': [], 'negative': []}
            net, dataset_info = initialize(dataset=dataset, experiment_id=experiment_id, batch_size=100,
                                           lr=1e-4)
            experiment_id = dataset_info['experiment_id']
            # introduce scale factor
            limits = (-15, 15)
            pmax, pmin = dataset_info['position'].max(), dataset_info['position'].min()
            dataset_info['scale_func'] = lambda x: np.divide((limits[1]-limits[0]) * (x.copy() - pmin), pmax-pmin) + limits[0]
            dataset_info['inverse_scale_func'] = lambda x: np.divide((x.copy()-limits[0]) * (pmax-pmin), limits[1]-limits[0]) + pmin
            if N is None:
                N = len(dataset_info['name'])

            nodes = make_nodes(position=dataset_info['scale_func'](dataset_info['position'][:N]),
                               name=dataset_info['name'][:N],
                               label=dataset_info['label'][:N],
                               index=True)
            categories = dataset_info['categories']
            data = {'nodes': nodes, 'categories': categories}

            # make json
            data = json.dumps(data).encode()
            self.wfile.write(data)  # send the body back

        if self.path == "/trainSvm":
            print("post /trainsvm")
            ### POST Request Header ###
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()

            # get body from request
            content_len = int(self.headers['Content-Length'])
            body = self.rfile.read(content_len)

            # convert body to list
            data = json.loads(str(body).decode('utf-8'))  # python 2
            #data = json.loads(str(body, encoding='utf-8'))      # python 3
            print(data)

            # Katjas code goes here
            # p, n = katja_function(data.p, data.n)

            # make json
            data = json.dumps(data).encode()  # re-encode; the original wrote the parsed dict directly, which fails
            self.wfile.write(data)  # send the body back

        if self.path == "/stopSvm":
            print("post /stopSvm")
            ### POST Request Header ###
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()

            # get body from request
            #content_len = int(self.headers['Content-Length'])
            #body = self.rfile.read(content_len)

            # convert body to list
            #data = json.loads(str(body).decode('utf-8'))  # python 2
            #data = json.loads(str(body, encoding='utf-8'))      # python 3
            #print(data)

            # Katjas code goes here
            # p, n = katja_function(data.p, data.n)

            # make json
            #data = json.dumps({p: p, n: n}).encode()
            self.wfile.write("stopped Svm")  #body zurueckschicken

        if self.path == "/updateLabels":
            print("post /updateLabels")
            ### POST Request Header ###
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()

            # get body from request
            #content_len = int(self.headers['Content-Length'])
            #body = self.rfile.read(content_len)

            # convert body to list
            #data = json.loads(str(body).decode('utf-8'))  # python 2
            #data = json.loads(str(body, encoding='utf-8'))      # python 3
            #print(data)

            # Katjas code goes here
            # katja_function(data.p, data.n)

            # make json
            data = json.dumps({}).encode()  # the original left this commented out, leaving data undefined here
            self.wfile.write(data)  # send the body back

        if self.path == "/getGroupNeighbours":
            print("post /getGroupNeighbours")
            ### POST Request Header ###
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()

            # get body from request
            content_len = int(self.headers['Content-Length'])
            body = self.rfile.read(content_len)

            # convert body to list
            data = json.loads(str(body).decode('utf-8'))  # python 2
            # data = json.loads(str(body, encoding='utf-8'))      # python 3

            print(data)
            print(len(data['positives']))

            try:
                _neighbors['negative'].extend(data['negatives'])
            except KeyError:
                pass

            _neighbors['positive'], scores = select_neighbors(dataset_info['feature'][:N],
                                                              data['positives'], _neighbors['negative'],
                                                              k=2000, neighbor_fn=svm_k_nearest_neighbors, test=False)

            print('New neighbors {}'.format(_neighbors['positive']))
            # Katjas code goes here
            # katja_function(data.p, data.n)

            # make json
            data['neighbours'] = {neigh: score for neigh, score in zip(_neighbors['positive'], scores)}
            print(len(data['positives']))
            data['group'] = list(data['positives'])
            data = json.dumps(data).encode()
            self.wfile.write(data)  # send the body back

        if self.path == "/startUpdateEmbedding":
            print("post /startUpdateEmbedding")
            ### POST Request Header ###
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()

            # get body from request
            content_len = int(self.headers['Content-Length'])
            body = self.rfile.read(content_len)

            # convert body to list
            body = json.loads(str(body).decode('utf-8'))  # python 2
            # data = json.loads(str(body, encoding='utf-8'))      # python 3
            #print(body)

            #print(self.socket_id)
            self.socket_id = body['socketId']
            id = body['socketId']

            data = read_nodes(body['nodes'])
            self.wfile.write('update_embedding started for ' + str(self.socket_id))  # send the body back

            # Katjas code goes here
            new_position = np.stack([data['x'], data['y']], axis=1)
            new_position = dataset_info['inverse_scale_func'](new_position)
            old_position = dataset_info['position']

            idx_modified = get_modified(old_position[:N], new_position, tol=dataset_info['inverse_scale_func'](np.array([3,])))
            if len(idx_modified) == 0 or len(idx_modified) == len(dataset_info['name']):       # TODO: fix recall bug
                print('Modified {} samples. - Invalid for training.'.format(len(idx_modified)))
                return 0
            # idx_old_neighbors = get_neighborhood(old_position[:N], idx_modified)
            # idx_new_neighbors = get_neighborhood(new_position[:N], idx_modified)
            idx_old_neighbors, _ = mutual_k_nearest_neighbors(old_position[:N], idx_modified, k=50)
            idx_new_neighbors = get_neighborhood(new_position[:N], idx_modified)

            print('Train MapNet using {} positives and {} negatives.'.format(len(idx_modified) + len(idx_new_neighbors),
                                                                             len(_neighbors['negative'])))

            # net, dataset_info = initialize(dataset=dataset, experiment_id=experiment_id, batch_size=100,
            #                                lr=1e-4)
            # experiment_id = dataset_info['experiment_id']
            # # introduce scale factor
            # limits = (-15, 15)
            # pmax, pmin = dataset_info['position'].max(), dataset_info['position'].min()
            # dataset_info['scale_func'] = lambda x: np.divide((limits[1] - limits[0]) * (x.copy() - pmin), pmax - pmin) + \
            #                                        limits[0]
            # dataset_info['inverse_scale_func'] = lambda x: np.divide((x.copy() - limits[0]) * (pmax - pmin),
            #                                                          limits[1] - limits[0]) + pmin
            # if N is None:
            #     N = len(dataset_info['name'])
            new_position = train(net, dataset_info['feature'][:N], dataset_info['name'].values.astype(str)[:N],
                                 old_position[:N], new_position,
                                 idx_modified, idx_old_neighbors, idx_new_neighbors,
                                 _neighbors['negative'],
                                 categories=dataset_info['categories'], label=dataset_info['label'][:N],
                                 lr=1e-3, experiment_id=experiment_id, socket_id=self.socket_id,
                                 scale_func=dataset_info['scale_func'])

            dataset_info['position'] = new_position
            _neighbors = {'positive': [], 'negative': []}
            return 0

            # TODO: what if this is started multiple times?
            # self.inter = SetInterval(0.6, update_embedding_handler, id)
            # self.inter.socket_id = id
            # self.inter.start()
            # t = threading.Timer(5, self.inter.cancel)
            # t.start()

            # make json
            # data = json.dumps({}).encode()

        if self.path == "/stopUpdateEmbedding":
            print("post /stopUpdateEmbedding")
            ### POST Request Header ###
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()

        return
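
do_POST is a method of a BaseHTTPRequestHandler subclass that the snippet does not show. A minimal serving sketch under that assumption, using Python 2 module names to match the "# python 2" comments (the handler name is hypothetical):

from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler

class Handler(BaseHTTPRequestHandler):  # hypothetical; do_POST above lives on this class
    pass

HTTPServer(('localhost', 8000), Handler).serve_forever()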