Exemplo n.º 1
0
    def __init__(self):
        """Set up the launcher window, wire its signals, and list all apps."""
        self.applications = Applications()
        self.applications.generate_apps_file()

        # Load the Glade UI definition and route its signals to this object.
        glade_path = Util.get_path(self, 'assets/main.glade')
        self.builder = Gtk.Builder()
        self.builder.add_from_file(glade_path)
        self.builder.connect_signals(self)

        self.window = self.builder.get_object('Main')
        self.window.connect('delete-event', Gtk.main_quit)

        # The treeview renders search results inside the 'Display' container;
        # self.open is the callback invoked when a row is activated.
        container = self.builder.get_object('Display')
        container.set_vexpand(True)
        self.treeview = Treeview(container, self.open)

        self.search_entry = self.builder.get_object('Search')
        self.search_entry.modify_font(Pango.FontDescription('Tahoma 25'))

        # An empty filter matches every known application.
        self.result = self.applications.filter_apps('')
        self.treeview.add_new(self.result)

        self.window.show_all()
Exemplo n.º 2
0
def parse_conf(filename, conf_dict):
    """Read server settings from *filename* into *conf_dict* and register apps.

    String options are copied verbatim from the ``[server]`` section;
    ``log_journal`` and ``log_file`` are parsed as booleans.  ``log_path``
    is only read (with a default of ``'myserver.log'``) when file logging
    is enabled.  Each entry of the ``[apps]`` section is a JSON object of
    keyword arguments forwarded to ``Applications.add_application``.
    """
    config = configparser.ConfigParser()
    config.read(filename)

    # Fix: the original also copied log_journal/log_file here as raw
    # strings, only to overwrite them with booleans just below.
    for key in ['port', 'user', 'home', 'display']:
        conf_dict[key] = config['server'][key]
    # Flags need the boolean-aware accessor ('yes'/'no' -> True/False).
    for key in ['log_journal', 'log_file']:
        conf_dict[key] = config.getboolean('server', key)
    if conf_dict['log_file']:
        conf_dict['log_path'] = config['server'].get('log_path',
                                                     'myserver.log')
    for app in config['apps']:
        Applications.add_application(app, **json.loads(config['apps'][app]))
Exemplo n.º 3
0
    def __init__(self):
        """Create the data-access helpers, ensure tables exist, preload views."""
        super().__init__()
        self.company = Companies()
        self.applications = Applications()
        self.communications = Communications()
        self.setup()

        # Three display rows: recent applications carry 4 columns each,
        # upcoming activities 3; cells start as None until loaded below.
        self.recent_applications = [[None] * 4 for _ in range(3)]
        self.upcoming_activities = [[None] * 3 for _ in range(3)]
        self.load_recent_applications()
        self.load_upcoming_activities()

        self.company_details = []
Exemplo n.º 4
0
 def __init__(self, classes=None, applications=None, parameters=None,
              uri=None, name=None, environment=None):
     """Initialise the entity; omitted containers default to empty instances."""
     # Each container argument falls back to a fresh, empty collection.
     self._set_classes(Classes() if classes is None else classes)
     self._set_applications(
         Applications() if applications is None else applications)
     self._set_parameters(Parameters() if parameters is None else parameters)
     # Plain string metadata; falsy values collapse to the empty string.
     self._uri = uri or ''
     self._name = name or ''
     self._environment = environment or ''
Exemplo n.º 5
0
 def __init__(self,
              classes=None,
              applications=None,
              parameters=None,
              name=None):
     """Initialise the entity; omitted containers default to empty instances."""
     # Container arguments fall back to fresh, empty collections.
     self._set_classes(Classes() if classes is None else classes)
     self._set_applications(
         Applications() if applications is None else applications)
     self._set_parameters(Parameters() if parameters is None else parameters)
     # Falsy names collapse to the empty string.
     self._name = name or ''
Exemplo n.º 6
0
class Main():
    """GTK application launcher: a search box over a treeview of apps."""

    def __init__(self):
        """Build the window from the Glade file and list every app."""
        self.applications = Applications()
        self.applications.generate_apps_file()

        # Load the UI definition and route its signals to this object.
        glade_path = Util.get_path(self, 'assets/main.glade')
        self.builder = Gtk.Builder()
        self.builder.add_from_file(glade_path)
        self.builder.connect_signals(self)

        self.window = self.builder.get_object('Main')
        self.window.connect('delete-event', Gtk.main_quit)

        # Treeview lives inside the 'Display' container; self.open runs
        # when a row is activated.
        container = self.builder.get_object('Display')
        container.set_vexpand(True)
        self.treeview = Treeview(container, self.open)

        self.search_entry = self.builder.get_object('Search')
        self.search_entry.modify_font(Pango.FontDescription('Tahoma 25'))

        # An empty filter matches every known application.
        self.result = self.applications.filter_apps('')
        self.treeview.add_new(self.result)

        self.window.show_all()

    def on_key_press(self, widget, event):
        """Close on Escape, pass navigation keys through, else focus search."""
        key = Gdk.keyval_name(event.keyval)
        if key == 'Escape':
            Gtk.Window.close(self.window)
        elif key not in ('Up', 'Down', 'Return'):
            # Any other keystroke should land in the search box.
            if not self.search_entry.is_focus():
                self.search_entry.grab_focus()
        return False

    def on_search(self, element):
        """Re-filter the application list whenever the search text changes."""
        self.result = self.applications.filter_apps(element.get_text())
        self.display_app_search()

    def display_app_search(self):
        """Replace the treeview contents with the current result set."""
        self.treeview.clear()
        self.treeview.add_new(self.result)

    def open(self, app_name):
        """Launch the chosen application, then close the launcher window."""
        self.applications.launch_app(app_name)
        time.sleep(1)
        Gtk.Window.close(self.window)
Exemplo n.º 7
0
 def __init__(self,
              settings,
              classes=None,
              applications=None,
              parameters=None,
              exports=None,
              uri=None,
              name=None,
              environment=None):
     """Initialise the entity; omitted containers default to empty instances."""
     # Falsy identifiers collapse to the empty string.
     self._uri = uri or ''
     self._name = name or ''
     self._set_classes(Classes() if classes is None else classes)
     self._set_applications(
         Applications() if applications is None else applications)
     # Parameters/exports need the settings and uri when built from scratch.
     if parameters is None:
         parameters = Parameters(None, settings, uri)
     if exports is None:
         exports = Exports(None, settings, uri)
     self._set_parameters(parameters)
     self._set_exports(exports)
     # NOTE(review): unlike _uri/_name this keeps None rather than
     # defaulting to '' — confirm that is intentional.
     self._environment = environment
Exemplo n.º 8
0
def generate_conf(filename):
    """Write a default server configuration, plus all built-in apps, to *filename*."""
    config = configparser.ConfigParser()
    config['server'] = {
        'port': '5000',
        'log_journal': 'yes',
        'log_file': 'no',
        'log_path': '/var/log/myserver.log',
        'user': '******',
        'home': '/home/eddie',
        'display': ':0'
    }

    # (name, positional flags, keyword commands) for each built-in app,
    # registered in this exact order.
    default_apps = [
        ('kidepedia', ('kill', 'run_as_user', 'display'),
         {'start': 'firefox /opt/kidepedia/kidepedia.html',
          'kill': 'pkill firefox'}),
        ('tuxpaint', ('kill', 'run_as_user', 'display'),
         {'start': 'tuxpaint', 'kill': 'pkill -9 tuxpaint'}),
        ('tuxmath', ('kill', 'run_as_user', 'display'),
         {'start': 'tuxmath', 'kill': 'pkill -9 tuxmath'}),
        ('gcompris', ('kill', 'run_as_user', 'display'),
         {'start': 'gcompris', 'kill': 'pkill -9 gcompris'}),
        ('screensaver', ('run_as_user',),
         {'kill': 'xscreensaver-command -display :0 -deactivate'}),
        ('xserver', (), {'restart': 'service lightdm restart'}),
        ('epoptes', (), {'restart': '/usr/sbin/epoptes-client'}),
        ('calc', ('display',),
         {'start': 'galculator', 'kill': 'pkill galculator'}),
        ('echo', ('run_as_user',), {'start': 'whoami'}),
        ('echo2', (), {'start': 'whoami'}),
    ]
    for app_name, flags, commands in default_apps:
        Applications.add_application(app_name, *flags, **commands)

    # Serialise every registered app into the [apps] section as JSON.
    config['apps'] = {}
    registered = Applications.get_all()
    for app_name in registered:
        config['apps'][app_name] = json.dumps(registered[app_name])

    with open(filename, 'w') as configfile:
        config.write(configfile)
Exemplo n.º 9
0
class MainWindow(QObject):
    """Qt bridge object exposing job-application data to the QML front end.

    Backed by the local SQLite database ``jobs.db`` with three tables
    (companies, applications, communications) created on demand.
    """

    def __init__(self):
        super().__init__()
        self.company = Companies()
        self.applications = Applications()
        self.communications = Communications()
        self.setup()

        # Three display rows: recent applications carry 4 columns each,
        # upcoming activities 3; cells stay None until loaded below.
        self.recent_applications = [[None for i in range(4)] for j in range(3)]
        self.upcoming_activities = [[None for i in range(3)] for j in range(3)]
        self.load_recent_applications()
        self.load_upcoming_activities()

        # LIFO buffer of detail strings pushed from QML.
        self.company_details = []

    @Slot(int, int, result=str)
    def get_app_field(self, row, col):
        """Return one cell of the recent-applications table."""
        return self.recent_applications[row][col]

    @Slot(int, int, result=str)
    def get_act_field(self, row, col):
        """Return one cell of the upcoming-activities table."""
        return self.upcoming_activities[row][col]

    @Slot(str)
    def set_company_details(self, string):
        """Push a company-details string coming from QML."""
        self.company_details.append(string)

    # Bug fix: the original decorator was @Slot(str, result=str), declaring
    # a str parameter that the method does not accept.
    @Slot(result=str)
    def get_company_details(self):
        """Pop and return the most recently pushed company-details string."""
        return self.company_details.pop()

    @Slot(str, result=str)
    def welcome_text(self, name):
        """Return a greeting for *name*."""
        return 'Welcome ' + name

    def load_recent_applications(self):
        """Fill the recent-applications grid: date, company name, job, stage."""
        for i, (app_ID, co_ID, app_date, stage, job, desc) in enumerate(
                self.applications.get_recent_applications()):
            self.recent_applications[i][0] = self.iso_to_date(app_date)
            self.recent_applications[i][1] = self.company.get_from_id(co_ID)[1]
            self.recent_applications[i][2] = job
            self.recent_applications[i][3] = stage

    def load_upcoming_activities(self):
        """Fill the upcoming-activities grid: date, interaction, notes."""
        for i, (comm_id, app_id, comm_date, interaction, status,
                notes) in enumerate(
                    self.communications.get_upcoming_communications()):
            self.upcoming_activities[i][0] = self.iso_to_date(comm_date)
            self.upcoming_activities[i][1] = interaction
            self.upcoming_activities[i][2] = notes

    def iso_to_date(self, iso_date: str) -> str:
        """Format an ISO date, e.g. '2021-03-04' -> 'March  4,  2021'.

        NOTE(review): '%-d' (day without zero padding) is a glibc
        extension; it raises on Windows strftime — confirm targets.
        """
        return date.fromisoformat(iso_date).strftime('%B  %-d,  %Y')

    def setup(self):
        """Create the three tables if they do not exist yet."""
        connection = sqlite3.connect('jobs.db')
        try:
            cursor = connection.cursor()

            cursor.execute("""CREATE TABLE IF NOT EXISTS companies (
                            company_ID INTEGER PRIMARY KEY,
                            company_name TEXT UNIQUE,
                            sector TEXT,
                            co_desc TEXT
                            )""")

            cursor.execute("""CREATE TABLE IF NOT EXISTS applications (
                            application_ID INTEGER PRIMARY KEY,
                            company_ID INTEGER,
                            application_date TEXT,
                            stage TEXT,
                            job_title TEXT,
                            job_desc TEXT,
                            FOREIGN KEY(company_ID) REFERENCES companies(company_ID)
                            )""")

            cursor.execute("""CREATE TABLE IF NOT EXISTS communications (
                            communication_ID INTEGER PRIMARY KEY,
                            application_ID INTEGER,
                            communication_date TEXT,
                            interaction TEXT,
                            status TEXT,
                            notes TEXT,
                            FOREIGN KEY(application_ID) REFERENCES applications(application_ID)
                            )""")
            # One commit covers all three idempotent CREATE statements.
            connection.commit()
        finally:
            # Bug fix: close the connection even if a CREATE fails.
            connection.close()

    # Bug fix: the original definition had neither ``self`` nor
    # ``@staticmethod``, so calling it on an instance passed the instance
    # as ``code`` and crashed.
    @staticmethod
    def display_all(code):
        """Print every row of the table selected by *code* (0, 1 or 2)."""
        tables = {0: 'companies', 1: 'applications', 2: 'communications'}
        connection = sqlite3.connect('jobs.db')
        try:
            cursor = connection.cursor()
            # Table name comes from a fixed whitelist, so the f-string is safe.
            for entry in cursor.execute(f'SELECT * FROM {tables[code]}'):
                print(entry)
            cursor.close()
        finally:
            connection.close()
Exemplo n.º 10
0
def init_model(backbone_model_name, freeze_backbone_for_N_epochs, input_shape,
               region_num, attribute_name_to_label_encoder_dict,
               kernel_regularization_factor, bias_regularization_factor,
               gamma_regularization_factor, beta_regularization_factor,
               pooling_mode, min_value, max_value, use_horizontal_flipping):
    """Build the training and inference Keras models for a multi-branch network.

    A backbone (chosen by ``backbone_model_name`` from the project's
    ``Applications`` registry) is split into blocks.  The last block is
    replicated into a global branch and, when ``region_num > 0``, a
    regional branch whose feature maps are sliced horizontally into
    ``region_num`` stripes.  Every branch output goes through global
    pooling (``pooling_mode``: "Average", "Max" or "GeM"), optional
    clipping to ``[min_value, max_value]``, and a softmax classification
    head sized from the "identity_ID" label encoder.  The training model
    combines a batch-hard triplet loss on the pooled features with
    categorical cross-entropy on the classifier outputs, plus an optional
    feature-consistency loss between original and horizontally flipped
    inputs when ``use_horizontal_flipping`` is set.

    Returns:
        ``(training_model, inference_model)`` — the compiled training
        model and the feature-extraction model used at inference time.
    """
    def _add_pooling_module(input_tensor):
        # Add a global pooling layer
        output_tensor = input_tensor
        # Only pool 4D (batch, H, W, C) tensors; 2D inputs are passed through.
        if len(K.int_shape(output_tensor)) == 4:
            if pooling_mode == "Average":
                output_tensor = GlobalAveragePooling2D()(output_tensor)
            elif pooling_mode == "Max":
                output_tensor = GlobalMaxPooling2D()(output_tensor)
            elif pooling_mode == "GeM":
                output_tensor = GlobalGeMPooling2D()(output_tensor)
            else:
                assert False, "{} is an invalid argument!".format(pooling_mode)

        # Add the clipping operation
        if min_value is not None and max_value is not None:
            output_tensor = Lambda(lambda x: K.clip(
                x, min_value=min_value, max_value=max_value))(output_tensor)

        return output_tensor

    def _add_classification_module(input_tensor):
        # Add a batch normalization layer
        output_tensor = input_tensor
        output_tensor = BatchNormalization(epsilon=2e-5)(output_tensor)

        # Add a dense layer with softmax activation
        # Class count comes from the identity label encoder.
        label_encoder = attribute_name_to_label_encoder_dict["identity_ID"]
        class_num = len(label_encoder.classes_)
        output_tensor = Dense(units=class_num,
                              use_bias=False,
                              kernel_initializer=RandomNormal(
                                  mean=0.0, stddev=0.001))(output_tensor)
        output_tensor = Activation("softmax")(output_tensor)

        return output_tensor

    def _triplet_hermans_loss(y_true,
                              y_pred,
                              metric="euclidean",
                              margin="soft"):
        # Create the loss in two steps:
        # 1. Compute all pairwise distances according to the specified metric.
        # 2. For each anchor along the first dimension, compute its loss.
        dists = cdist(y_pred, y_pred, metric=metric)
        loss = batch_hard(dists=dists,
                          pids=tf.argmax(y_true, axis=-1),
                          margin=margin)
        return loss

    # Initiation
    miscellaneous_output_tensor_list = []

    # Initiate the early blocks
    applications_instance = Applications()
    model_name_to_model_function = applications_instance.get_model_name_to_model_function(
    )
    assert backbone_model_name in model_name_to_model_function.keys(
    ), "Backbone {} is not supported.".format(backbone_model_name)
    model_function = model_name_to_model_function[backbone_model_name]
    blocks = applications_instance.get_model_in_blocks(
        model_function=model_function, include_top=False)
    vanilla_input_tensor = Input(shape=input_shape)
    intermediate_output_tensor = vanilla_input_tensor
    # All blocks except the last are shared by both branches.
    for block in blocks[:-1]:
        block = Applications.wrap_block(block, intermediate_output_tensor)
        intermediate_output_tensor = block(intermediate_output_tensor)

    # Initiate the last blocks
    # The final block is duplicated so global and regional branches get
    # independent weights.
    last_block = Applications.wrap_block(blocks[-1],
                                         intermediate_output_tensor)
    last_block_for_global_branch_model = replicate_model(
        model=last_block, suffix="global_branch")
    last_block_for_regional_branch_model = replicate_model(
        model=last_block, suffix="regional_branch")

    # Add the global branch
    miscellaneous_output_tensor = _add_pooling_module(
        input_tensor=last_block_for_global_branch_model(
            intermediate_output_tensor))
    miscellaneous_output_tensor_list.append(miscellaneous_output_tensor)

    # Add the regional branch
    if region_num > 0:
        # Process each region
        regional_branch_output_tensor = last_block_for_regional_branch_model(
            intermediate_output_tensor)
        total_height = K.int_shape(regional_branch_output_tensor)[1]
        region_size = total_height // region_num
        for region_index in np.arange(region_num):
            # Get a slice of feature maps
            start_index = region_index * region_size
            end_index = (region_index + 1) * region_size
            # The last stripe absorbs any remainder rows.
            if region_index == region_num - 1:
                end_index = total_height
            # Default args bind start/end per iteration (avoids the
            # late-binding closure pitfall in the loop).
            sliced_regional_branch_output_tensor = Lambda(
                lambda x, start_index=start_index, end_index=end_index:
                x[:, start_index:end_index])(regional_branch_output_tensor)

            # Downsampling
            sliced_regional_branch_output_tensor = Conv2D(
                filters=K.int_shape(sliced_regional_branch_output_tensor)[-1]
                // region_num,
                kernel_size=3,
                padding="same")(sliced_regional_branch_output_tensor)
            sliced_regional_branch_output_tensor = Activation("relu")(
                sliced_regional_branch_output_tensor)

            # Add the regional branch
            miscellaneous_output_tensor = _add_pooling_module(
                input_tensor=sliced_regional_branch_output_tensor)
            miscellaneous_output_tensor_list.append(
                miscellaneous_output_tensor)

    # Define the model used in inference
    inference_model = Model(inputs=[vanilla_input_tensor],
                            outputs=miscellaneous_output_tensor_list,
                            name="inference_model")
    specify_regularizers(inference_model, kernel_regularization_factor,
                         bias_regularization_factor,
                         gamma_regularization_factor,
                         beta_regularization_factor)

    # Define the model used in classification
    # One classification head per branch output, fed by fresh Input
    # placeholders that mirror each branch's feature shape.
    classification_input_tensor_list = [
        Input(shape=K.int_shape(item)[1:])
        for item in miscellaneous_output_tensor_list
    ]
    classification_output_tensor_list = []
    for classification_input_tensor in classification_input_tensor_list:
        classification_output_tensor = _add_classification_module(
            input_tensor=classification_input_tensor)
        classification_output_tensor_list.append(classification_output_tensor)
    classification_model = Model(inputs=classification_input_tensor_list,
                                 outputs=classification_output_tensor_list,
                                 name="classification_model")
    specify_regularizers(classification_model, kernel_regularization_factor,
                         bias_regularization_factor,
                         gamma_regularization_factor,
                         beta_regularization_factor)

    # Define the model used in training
    # expand normalizes single-output models to a list for uniform zipping.
    expand = lambda x: x if isinstance(x, list) else [x]
    vanilla_input_tensor = Input(shape=K.int_shape(inference_model.input)[1:])
    vanilla_feature_tensor_list = expand(inference_model(vanilla_input_tensor))
    if use_horizontal_flipping:
        # Average features of the original and mirrored image.
        flipped_input_tensor = tf.image.flip_left_right(vanilla_input_tensor)
        flipped_feature_tensor_list = expand(
            inference_model(flipped_input_tensor))
        merged_feature_tensor_list = [
            sum(item_tuple) / 2 for item_tuple in zip(
                vanilla_feature_tensor_list, flipped_feature_tensor_list)
        ]
    else:
        merged_feature_tensor_list = vanilla_feature_tensor_list
    miscellaneous_output_tensor_list = merged_feature_tensor_list
    classification_output_tensor_list = expand(
        classification_model(merged_feature_tensor_list))
    training_model = Model(inputs=[vanilla_input_tensor],
                           outputs=miscellaneous_output_tensor_list +
                           classification_output_tensor_list,
                           name="training_model")

    # Add the flipping loss
    # Penalize disagreement between original and flipped features (MSE).
    if use_horizontal_flipping:
        flipping_loss_list = [
            K.mean(mean_squared_error(*item_tuple)) for item_tuple in zip(
                vanilla_feature_tensor_list, flipped_feature_tensor_list)
        ]
        flipping_loss = sum(flipping_loss_list)
        training_model.add_metric(flipping_loss,
                                  name="flipping",
                                  aggregation="mean")
        training_model.add_loss(1.0 * flipping_loss)

    # Compile the model
    # Triplet loss on each feature output; cross-entropy on each classifier.
    triplet_hermans_loss_function = lambda y_true, y_pred: 1.0 * _triplet_hermans_loss(
        y_true, y_pred)
    miscellaneous_loss_function_list = [
        triplet_hermans_loss_function
    ] * len(miscellaneous_output_tensor_list)
    categorical_crossentropy_loss_function = lambda y_true, y_pred: 1.0 * categorical_crossentropy(
        y_true, y_pred, from_logits=False, label_smoothing=0.0)
    classification_loss_function_list = [
        categorical_crossentropy_loss_function
    ] * len(classification_output_tensor_list)
    # compile_kwargs is stashed on the model so callers can recompile later
    # (e.g. after unfreezing the backbone).
    training_model.compile_kwargs = {
        "optimizer":
        Adam(),
        "loss":
        miscellaneous_loss_function_list + classification_loss_function_list
    }
    if freeze_backbone_for_N_epochs > 0:
        specify_trainable(model=training_model,
                          trainable=False,
                          keywords=[block.name for block in blocks])
    training_model.compile(**training_model.compile_kwargs)

    # Print the summary of the training model
    summarize_model(training_model)

    return training_model, inference_model