Ejemplo n.º 1
0
def run(protocol, csvfile):
	"""Benchmark *protocol* on every grid4 topology and append results to *csvfile*.

	For each topology JSON file: apply the network state, start the routing
	software, measure its startup time, ping random multi-hop paths for 30s,
	record system load, then tear everything down and append a CSV row.
	Aborts the whole sweep early when fewer than 40% of pings arrive.
	"""
	# Plain string: the glob pattern contains no placeholders, so no f-prefix.
	for path in sorted(glob.glob('../../data/grid4/*.json')):
		state = tools.load_json(path)
		(node_count, link_count) = tools.json_count(state)

		print(f'run {protocol} on {path}')

		network.apply(state=state, link_command=get_tc_command, remotes=remotes)
		tools.sleep(10)

		# Measure how long the routing software takes to start.
		software_start_ms = tools.millis()
		software.start(protocol, remotes)
		software_startup_ms = tools.millis() - software_start_ms

		# Give the protocol time to converge before measuring.
		tools.sleep(30)

		# Oversample paths, then keep link_count paths with at least 2 hops.
		paths = tools.get_random_paths(state, 2 * link_count)
		paths = tools.filter_paths(state, paths, min_hops=2, path_count=link_count)
		ping_result = tools.ping_paths(remotes=remotes, paths=paths, duration_ms=30000, verbosity='verbose')

		sysload_result = tools.sysload(remotes)

		software.clear(remotes)

		# add data to csv file
		extra = (['node_count', 'software_startup_ms'], [node_count, software_startup_ms])
		tools.csv_update(csvfile, '\t', extra, ping_result.getData(), sysload_result)

		network.clear(remotes)

		# abort benchmark when less than 40% of the pings arrive
		if (ping_result.received / ping_result.transmitted) < 0.4:
			break
Ejemplo n.º 2
0
 def reset(self, start_at):
     """Truncate every cached history series to its first *start_at* entries.

     Reloads ``self.H`` from ``self.json_path`` when that file exists.
     No-op when *start_at* is zero/negative or no JSON path is configured.
     """
     if start_at <= 0:
         return
     path = self.json_path
     if path is None or not path.exists():
         return
     self.H = load_json(path)
     for key in self.H:
         self.H[key] = self.H[key][:start_at]
Ejemplo n.º 3
0
 def __init__(self,
              task,
              max_len=None,
              loss_weight=0.25,
              regularizers=0.001,
              lstm_in_dropout=0.5,
              lstm_dropout=0.5,
              fc_dropout=0.5):
     """Configure a task: load the shared vocab/embeddings once, then the data.

     Args:
         task: dataset subdirectory name under ./dataset/data/.
         max_len: fixed sequence length; None lets the loader infer it.
         loss_weight: auxiliary loss weighting factor.
         regularizers: regularization strength.
         lstm_in_dropout, lstm_dropout, fc_dropout: dropout rates per layer.
     """
     source_dir = os.path.join('.', 'dataset', 'data', task)
     # Vocab and embeddings are cached on the Config class so they are only
     # loaded once per process, shared across all task instances.
     if Config.word_vocab is None:
         Config.word_vocab, _ = load_vocab(
             os.path.join(source_dir, '..', 'words.vocab'))
         Config.vocab_size = len(Config.word_vocab)
         Config.word_emb = load_embeddings(
             os.path.join(source_dir, '..', 'glove.filtered.npz'))
         # Embedding dimensionality taken from the first vector.
         Config.emb_len = len(Config.word_emb[0])
         Config.word_num = len(Config.word_emb)
     self.max_len = max_len
     self.loss_weight = loss_weight
     self.regularizers = regularizers
     self.lstm_in_dropout = lstm_in_dropout
     self.lstm_dropout = lstm_dropout
     self.fc_dropout = fc_dropout
     # Number of output classes comes from the task's label.json metadata.
     self.label_size = load_json(os.path.join(source_dir,
                                              'label.json'))["label_size"]
     self.load_data(source_dir)
Ejemplo n.º 4
0
 def test_update_json1(self):
     """update_json must merge new keys into an existing JSON file on disk."""
     original = {'a': 1, 'b': 2}
     addition = {'c': 3, 'd': 4}
     expected = {'a': 1, 'c': 3, 'b': 2, 'd': 4}
     output_file = os.path.join(fixture_dir,
                                'foo_{0}.json'.format(t.timestamp()))
     t.write_json(object=original, output_file=output_file)
     t.update_json(data=addition, input_file=output_file)
     loaded = t.load_json(input_file=output_file)
     self.assertTrue(
         expected == loaded,
         'Data read from JSON file does not match expected output')
Ejemplo n.º 5
0
def make_coin_objects(external):
    """Build a Coin object for every entry in coin_info.json.

    Any entry missing an 'abi' key has its ABI fetched via get_abi() and the
    enriched mapping is written back to coin_info.json, so ABIs only need to
    be fetched once per coin.

    Args:
        external: passed through to each Coin constructor.

    Returns:
        list of Coin instances, one per entry in coin_info.json.
    """
    coin_info = load_json('coin_info.json')
    coins = []
    for name in coin_info.keys():
        info = coin_info[name]
        # Fetch the abi only if the key isn't present in the file.
        # Allows for adding coins without looking for abi.
        # (dict.get would evaluate get_abi() eagerly even when cached, and the
        # original indexed the key string instead of the entry dict.)
        if 'abi' not in info:
            info['abi'] = get_abi(info['address'])
        coins.append(Coin(info['address'], info['abi'], external))

    # Persist any freshly fetched ABIs for future runs.
    with open('coin_info.json', 'w') as f:
        json.dump(coin_info, f)

    return coins
Ejemplo n.º 6
0
 def get_train_test_from_file(self, path):
     """Load a JSON dataset and return (padded_sentence_sequences, labels).

     Infers and caches ``self.max_len`` from the longest sentence when it is
     not already set; sequences are post-padded/post-truncated to that length.
     """
     records = load_json(path)
     sentences = [record['sentence'] for record in records]
     labels = [record['label'] for record in records]
     if self.max_len is None:
         self.max_len = max(len(sentence) for sentence in sentences)
     padded = sequence.pad_sequences(sentences,
                                     maxlen=self.max_len,
                                     padding='post',
                                     truncating='post')
     return padded, labels
Ejemplo n.º 7
0
def run(protocol, files, csvfile):
    """Benchmark *protocol* convergence over increasing wait offsets.

    For each topology matched by the *files* glob: apply the network state,
    then for offsets 0..58s (step 2) start the software, wait, ping 200
    random multi-hop paths for 2s, and append traffic/startup/ping/sysload
    data as a row to *csvfile*.
    """
    # Fixed seed so path selection is reproducible across runs.
    tools.seed_random(1234)

    for path in sorted(glob.glob(files)):
        state = tools.load_json(path)
        (node_count, link_count) = tools.json_count(state)

        print(f'run {protocol} on {path}')

        network.apply(state=state, link_command=get_tc_command)

        tools.sleep(10)

        for offset in range(0, 60, 2):
            # Time the traffic snapshot itself.
            tmp_ms = tools.millis()
            traffic_beg = tools.traffic()
            traffic_ms = tools.millis() - tmp_ms

            # Time the software startup.
            tmp_ms = tools.millis()
            software.start(protocol)
            software_ms = tools.millis() - tmp_ms

            # Give the protocol `offset` seconds to converge before pinging.
            tools.sleep(offset)

            # Oversample random paths, then keep 200 with at least 2 hops.
            paths = tools.get_random_paths(state, 2 * 200)
            paths = tools.filter_paths(state,
                                       paths,
                                       min_hops=2,
                                       path_count=200)
            ping_result = tools.ping_paths(paths=paths,
                                           duration_ms=2000,
                                           verbosity='verbose')

            traffic_end = tools.traffic()

            sysload_result = tools.sysload()

            software.clear()

            # add data to csv file
            extra = (['node_count', 'traffic_ms', 'software_ms', 'offset_ms'],
                     [node_count, traffic_ms, software_ms, offset * 1000])
            tools.csv_update(csvfile, '\t', extra,
                             (traffic_end - traffic_beg).getData(),
                             ping_result.getData(), sysload_result)

        network.clear()
Ejemplo n.º 8
0
    def __init__(self, json_labels=None, data_dir=None, group=None):
        """Load labels from a JSON file and derive per-group label statistics.

        Args:
            json_labels: path to the JSON labels file.
            data_dir: data directory (converted to a path object).
            group: group name used to filter/select labels.
        """
        self._log = logging.getLogger('root')
        self._log.info(f'Loading labels from {json_labels} for group: {group}')
        self._labels = tools.load_json(json_labels)
        self._group = group
        self._data_dir = tools.str2path(data_dir)

        # Derived state: each helper consumes attributes set above it, so the
        # order of these calls matters.
        self._labels = self._filter_labels()
        self._group_labels = self._get_group_labels()
        self._numerical_labels, self._labels2num = self._get_numerical_labels()
        self._labels_counts = self._get_labels_counts()
        self._one_hot_labels = to_categorical(self._numerical_labels)
        self._total_count = len(self._group_labels)
        self._inverse_frequency = self._get_inverse_frequency()
        self._proportions = self._get_proportions()
        self._image2one_hot = self._get_image2one_hot()
Ejemplo n.º 9
0
def run(protocol, files, csvfile):
    """Benchmark *protocol* on each topology matched by the *files* glob.

    Per topology (skipping any with more than 300 nodes): apply the network
    state via remotes, start the software (timing its startup), wait 5 min to
    converge, ping 200 random multi-hop paths for 5 min while measuring
    traffic, then tear down and append a CSV row to *csvfile*.
    """
    for path in sorted(glob.glob(files)):
        state = tools.load_json(path)
        (node_count, link_count) = tools.json_count(state)

        # Limit node count to 300
        if node_count > 300:
            continue

        print(f'run {protocol} on {path}')

        network.apply(state=state,
                      link_command=get_tc_command,
                      remotes=remotes)

        tools.sleep(10)

        # Measure how long the routing software takes to start.
        software_start_ms = tools.millis()
        software.start(protocol, remotes)
        software_startup_ms = tools.millis() - software_start_ms

        # Allow 5 minutes for the protocol to converge.
        tools.sleep(300)

        start_ms = tools.millis()
        traffic_beg = tools.traffic(remotes)

        # Oversample random paths, then keep 200 with at least 2 hops.
        paths = tools.get_random_paths(state, 2 * 200)
        paths = tools.filter_paths(state, paths, min_hops=2, path_count=200)
        ping_result = tools.ping_paths(remotes=remotes,
                                       paths=paths,
                                       duration_ms=300000,
                                       verbosity='verbose')

        # Traffic delta spans the whole ping measurement window.
        traffic_ms = tools.millis() - start_ms
        traffic_end = tools.traffic(remotes)

        sysload_result = tools.sysload(remotes)

        software.clear(remotes)
        network.clear(remotes)

        # add data to csv file
        extra = (['node_count', 'traffic_ms', 'software_startup_ms'],
                 [node_count, traffic_ms, software_startup_ms])
        tools.csv_update(csvfile, '\t', extra,
                         (traffic_end - traffic_beg).getData(),
                         ping_result.getData(), sysload_result)
Ejemplo n.º 10
0
def run(protocol, csvfile):
	"""Benchmark *protocol* on every freifunk topology and append to *csvfile*.

	Per topology (skipping any with more than 310 nodes): apply the network
	state, start the software, wait 5 min to converge, ping node_count random
	multi-hop paths for 5 min while measuring traffic, then tear down and
	append a CSV row tagged with the dataset name.
	"""
	# Fixed seed so path selection is reproducible across runs.
	tools.seed_random(1377)

	# Plain string: the glob pattern contains no placeholders, so no f-prefix.
	for path in sorted(glob.glob('../../data/freifunk/*.json')):
		state = tools.load_json(path)

		(node_count, link_count) = tools.json_count(state)
		# e.g. "freifunk-<name>.json" -> "<name>-0042"
		dataset_name = '{}-{:04d}'.format(os.path.basename(path)[9:-5], node_count)

		# limit to what the host can handle
		if node_count > 310:
			continue

		print(f'run {protocol} on {path}')

		state = network.apply(remotes=remotes, state=state, link_command=get_tc_command)
		tools.sleep(10)

		software.start(protocol, remotes)

		# Allow 5 minutes for the protocol to converge.
		tools.sleep(300)

		start_ms = tools.millis()
		traffic_beg = tools.traffic(remotes)

		# Oversample random paths, then keep node_count with at least 2 hops.
		paths = tools.get_random_paths(state, 2 * node_count)
		paths = tools.filter_paths(state, paths, min_hops=2, path_count=node_count)
		ping_result = tools.ping_paths(remotes=remotes, paths=paths, duration_ms=300000, verbosity='verbose')

		sysload_result = tools.sysload(remotes)

		traffic_ms = tools.millis() - start_ms
		traffic_end = tools.traffic(remotes)
		software.clear(remotes)

		# add data to csv file
		extra = (['dataset_name', 'node_count', 'traffic_ms'], [dataset_name, node_count, traffic_ms])
		tools.csv_update(csvfile, '\t', extra, (traffic_end - traffic_beg).getData(), ping_result.getData(), sysload_result)

		network.clear(remotes)
Ejemplo n.º 11
0
def build_bar():
    """Build a GTK button bar from the JSON appendix file.

    Each entry provides "name", "exec" and "icon"; icons are loaded either
    from an absolute file path or from the current icon theme (best effort:
    a missing icon leaves the button without an image).

    Returns:
        Gtk.Box holding one launcher button per appendix entry.
    """
    icon_theme = Gtk.IconTheme.get_default()
    orientation = Gtk.Orientation.VERTICAL if args.vertical else Gtk.Orientation.HORIZONTAL
    box = Gtk.Box(orientation=orientation)

    appendix = load_json(build_from_file)
    for entry in appendix:
        name = entry["name"]
        cmd = entry["exec"]  # renamed from `exec` to avoid shadowing the builtin
        icon = entry["icon"]
        image = None
        if icon.startswith('/'):
            # Absolute path: load the image file directly.
            try:
                pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(icon, args.s, args.s)
                image = Gtk.Image.new_from_pixbuf(pixbuf)
            except Exception:
                pass  # best effort: button simply gets no image
        else:
            # Icon-theme name: strip a file extension if one was given.
            try:
                if icon.endswith('.svg') or icon.endswith('.png'):
                    # BUG FIX: entries are dicts, so `entry.icon` raised
                    # AttributeError (silently swallowed) and the themed icon
                    # was never loaded. Strip the extension from `icon` itself.
                    icon = icon.split('.')[0]
                pixbuf = icon_theme.load_icon(icon, args.s, Gtk.IconLookupFlags.FORCE_SIZE)
                image = Gtk.Image.new_from_pixbuf(pixbuf)
            except Exception:
                pass  # best effort: button simply gets no image

        button = Gtk.Button()
        button.set_property("name", "button")
        button.set_always_show_image(True)
        button.set_image(image)
        button.set_image_position(Gtk.PositionType.TOP)
        button.set_label(name)
        button.set_property("width_request", args.bw)
        button.set_property("height_request", args.bh)
        button.connect('clicked', launch, cmd)
        box.pack_start(button, False, False, int(args.p / 2))

    return box
Ejemplo n.º 12
0
def build_menu():
    """Build the main GTK menu: search box, favourites, categories, appendix.

    Sections (each optional, depending on args):
      1. search entry (unless --no-menu),
      2. most-used "favourite" items from the click cache,
      3. category submenus for all detected .desktop entries,
      4. user-defined appendix items from a JSON file.

    Returns:
        Gtk.Menu, fully populated and shown.
    """
    icon_theme = Gtk.IconTheme.get_default()
    menu = Gtk.Menu()

    if not args.no_menu:
        win.search_item = Gtk.MenuItem()
        win.search_item.add(win.search_box)
        win.search_item.set_sensitive(False)
        menu.add(win.search_item)

        # Prepend favourite items (-f or -fn argument used)
        favs_number = 0
        if args.favourites:
            favs_number = 5
        elif args.fn:
            favs_number = args.fn
        if favs_number > 0:
            global sorted_cache
            if len(sorted_cache) < favs_number:
                favs_number = len(sorted_cache)

            to_prepend = []  # list of favourite items
            for i in range(favs_number):
                fav_exec = sorted_cache[i][0]
                for item in all_entries:
                    if item.exec == fav_exec and item not in to_prepend:
                        to_prepend.append(item)
                        break  # stop searching, there may be duplicates on the list

            # build menu items
            for entry in to_prepend:
                name = entry.name
                cmd = entry.exec  # renamed from `exec` to avoid shadowing the builtin
                icon = entry.icon
                hbox = Gtk.HBox()
                label = Gtk.Label()
                label.set_text(name)
                image = None
                if icon.startswith('/'):
                    # Absolute path: load the image file directly.
                    try:
                        pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(
                            icon, args.s, args.s)
                        image = Gtk.Image.new_from_pixbuf(pixbuf)
                    except Exception:
                        pass  # best effort: item simply gets no image
                else:
                    try:
                        if icon.endswith('.svg') or icon.endswith('.png'):
                            icon = entry.icon.split('.')[0]
                        pixbuf = icon_theme.load_icon(
                            icon, args.s, Gtk.IconLookupFlags.FORCE_SIZE)
                        image = Gtk.Image.new_from_pixbuf(pixbuf)
                    except Exception:
                        pass  # best effort: item simply gets no image
                if image:
                    hbox.pack_start(image, False, False, 10)
                if name:
                    hbox.pack_start(label, False, False, 0)
                item = Gtk.MenuItem()
                item.set_property("name", "item-favorites")
                item.add(hbox)
                item.connect('activate', launch, cmd)
                menu.append(item)

            if to_prepend:
                separator = Gtk.SeparatorMenuItem()
                separator.set_property("name", "separator")
                menu.append(separator)

        # actual system menu with submenus for each category
        if c_audio_video:
            append_submenu(c_audio_video, menu, 'AudioVideo')
        if c_development:
            append_submenu(c_development, menu, 'Development')
        if c_game:
            append_submenu(c_game, menu, 'Game')
        if c_graphics:
            append_submenu(c_graphics, menu, 'Graphics')
        if c_network:
            append_submenu(c_network, menu, 'Network')
        if c_office:
            append_submenu(c_office, menu, 'Office')
        if c_science:
            append_submenu(c_science, menu, 'Science')
        if c_settings:
            append_submenu(c_settings, menu, 'Settings')
        if c_system:
            append_submenu(c_system, menu, 'System')
        if c_utility:
            append_submenu(c_utility, menu, 'Utility')
        if c_other:
            append_submenu(c_other, menu, 'Other')

    # user-defined menu from default or custom file (see args)
    if args.append or args.af or args.no_menu:
        if not args.no_menu:  # nothing above to separate
            separator = Gtk.SeparatorMenuItem()
            separator.set_property("name", "separator")
            menu.append(separator)
        appendix = load_json(build_from_file)
        for entry in appendix:
            name = entry["name"]
            cmd = entry["exec"]  # renamed from `exec` to avoid shadowing the builtin
            icon = entry["icon"]
            hbox = Gtk.HBox()
            label = Gtk.Label()
            label.set_text(name)
            image = None
            if icon.startswith('/'):
                try:
                    pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(
                        icon, args.s, args.s)
                    image = Gtk.Image.new_from_pixbuf(pixbuf)
                except Exception:
                    pass  # best effort: item simply gets no image
            else:
                try:
                    if icon.endswith('.svg') or icon.endswith('.png'):
                        # BUG FIX: appendix entries are dicts, so `entry.icon`
                        # raised AttributeError (silently swallowed) and the
                        # themed icon was never loaded. Strip the extension
                        # from the local `icon` variable instead.
                        icon = icon.split('.')[0]
                    pixbuf = icon_theme.load_icon(
                        icon, args.s, Gtk.IconLookupFlags.FORCE_SIZE)
                    image = Gtk.Image.new_from_pixbuf(pixbuf)
                except Exception:
                    pass  # best effort: item simply gets no image
            if image:
                hbox.pack_start(image, False, False, 10)
            if name:
                hbox.pack_start(label, False, False, 0)
            item = Gtk.MenuItem()
            item.set_property("name", "item-appendix")
            item.add(hbox)
            item.connect('activate', launch, cmd, True)  # do not cache!
            menu.append(item)

    menu.connect("hide", win.die)
    menu.set_property("reserve_toggle_size", False)
    menu.show_all()

    return menu
Ejemplo n.º 13
0
def main():
    """Entry point: parse args, load config/cache/CSS, build and show the menu.

    Enforces a single running instance via an exclusive lock on a pid file in
    the temp directory; a second invocation kills any running sgtk-menu and
    exits, which makes the launcher keybinding act as a toggle.
    """
    # exit if already running, thanks to Slava V at https://stackoverflow.com/a/384493/4040598
    pid_file = os.path.join(tempfile.gettempdir(), 'sgtk-menu.pid')
    fp = open(pid_file, 'w')
    try:
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        subprocess.run("pkill -f sgtk-menu", shell=True)
        sys.exit(2)

    # --- command-line arguments ---------------------------------------------
    global build_from_file
    parser = argparse.ArgumentParser(
        description="GTK menu for sway, i3 and some floating WMs")
    placement = parser.add_mutually_exclusive_group()
    placement.add_argument("-b",
                           "--bottom",
                           action="store_true",
                           help="display menu at the bottom (sway & i3 only)")
    placement.add_argument("-c",
                           "--center",
                           action="store_true",
                           help="center menu on the screen (sway & i3 only)")

    favourites = parser.add_mutually_exclusive_group()
    favourites.add_argument("-f",
                            "--favourites",
                            action="store_true",
                            help="prepend 5 most used items")
    favourites.add_argument('-fn',
                            type=int,
                            help="prepend <FN> most used items")

    appendix = parser.add_mutually_exclusive_group()
    appendix.add_argument(
        "-a",
        "--append",
        action="store_true",
        help="append custom menu from {}".format(build_from_file))
    appendix.add_argument("-af",
                          type=str,
                          help="append custom menu from {}".format(
                              os.path.join(config_dir, '<AF>')))

    parser.add_argument("-n",
                        "--no-menu",
                        action="store_true",
                        help="skip menu, display appendix only")
    parser.add_argument("-l",
                        type=str,
                        help="force language (e.g. \"de\" for German)")
    parser.add_argument("-s",
                        type=int,
                        default=20,
                        help="menu icon size (min: 16, max: 48, default: 20)")
    parser.add_argument(
        "-w",
        type=int,
        help="menu width in px (integer, default: screen width / 8)")
    parser.add_argument(
        "-d",
        type=int,
        default=100,
        help="menu delay in milliseconds (default: 100; sway & i3 only)")
    parser.add_argument(
        "-o",
        type=float,
        default=0.3,
        help="overlay opacity (min: 0.0, max: 1.0, default: 0.3; "
        "sway & i3 only)")
    parser.add_argument("-t",
                        type=int,
                        default=30,
                        help="sway submenu lines limit (default: 30)")
    parser.add_argument(
        "-y",
        type=int,
        default=0,
        help="y offset from edge to display menu at (sway & i3 only)")
    parser.add_argument(
        "-css",
        type=str,
        default="style.css",
        help="use alternative {} style sheet instead of style.css".format(
            os.path.join(config_dir, '<CSS>')))
    global args
    args = parser.parse_args()
    # Only use a CSS file when the default style.css exists in the config dir.
    css_file = os.path.join(config_dirs()[0], args.css) if os.path.exists(
        os.path.join(config_dirs()[0], 'style.css')) else None

    # Clamp icon size to the documented 16..48 range.
    if args.s < 16:
        args.s = 16
    elif args.s > 48:
        args.s = 48

    # We do not need any delay in other WMs
    if other_wm:
        args.d = 0

    # Create default config files if not found
    create_default_configs(config_dir)

    # Replace appendix file name with custom - if any
    if args.af:
        build_from_file = os.path.join(config_dirs()[0], args.af)

    if css_file:
        screen = Gdk.Screen.get_default()
        provider = Gtk.CssProvider()
        try:
            provider.load_from_path(css_file)
            Gtk.StyleContext.add_provider_for_screen(
                screen, provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)
        except Exception as e:
            print(e)

    # cache stores number of clicks on each item
    global cache
    cache = load_json(cache_file)

    if not cache:
        save_json(cache, cache_file)
    # Most-clicked entries first; used to build the favourites section.
    global sorted_cache
    sorted_cache = sorted(cache.items(), reverse=True, key=lambda x: x[1])

    global locale
    locale = get_locale_string(args.l)
    category_names_dictionary = localized_category_names(locale)

    # replace additional category names with main ones
    for name in category_names:
        main_category_name = additional_to_main(name)
        try:
            # NOTE(review): `localized_names_dictionary` is not defined in
            # this function; presumably a module-level dict. If it is not,
            # the NameError is silently swallowed by the bare except below —
            # verify against the module globals.
            localized_names_dictionary[
                main_category_name] = category_names_dictionary[
                    main_category_name]
        except:
            pass

    screen = Gdk.Screen.get_default()
    provider = Gtk.CssProvider()
    style_context = Gtk.StyleContext()
    style_context.add_provider_for_screen(
        screen, provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)

    # find all .desktop entries, create DesktopEntry class instances;
    # DesktopEntry adds itself to the proper List in the class constructor
    list_entries()

    # Overlay window
    global win
    win = MainWindow()
    if other_wm:
        # We need this to obtain the screen geometry when i3ipc module unavailable
        win.resize(1, 1)
        win.show_all()
    global geometry
    # If we're not on sway neither i3, this won't return values until the window actually shows up.
    # Let's try as many times as needed. The retries int protects from an infinite loop.
    retries = 0
    while geometry[0] == 0 and geometry[1] == 0 and geometry[
            2] == 0 and geometry[3] == 0:
        geometry = display_geometry()
        retries += 1
        if retries > 500:
            print("\nFailed to get the current screen geometry, exiting...\n")
            sys.exit(2)
    x, y, w, h = geometry

    if not other_wm:
        win.resize(w, h)
    else:
        win.resize(1, 1)
        win.set_gravity(Gdk.Gravity.CENTER)
        if pynput:
            # Open the window at the mouse pointer position.
            x, y = mouse_pointer.position
            win.move(x, y)
        else:
            win.move(0, 0)
            print("\nYou need the python-pynput package!\n")

    win.set_skip_taskbar_hint(True)
    win.menu = build_menu()
    win.menu.set_property("name", "menu")

    global menu_items_list
    menu_items_list = win.menu.get_children()

    win.menu.propagate_key_event = False
    win.menu.connect("key-release-event", win.search_items)
    # Let's reserve some width for long entries found with the search box
    if args.w:
        win.menu.set_property("width_request", args.w)
    else:
        win.menu.set_property("width_request",
                              int(win.screen_dimensions[0] / 8))
    win.show_all()

    # Show the menu after the configured delay, then enter the GTK main loop.
    GLib.timeout_add(args.d, open_menu)
    Gtk.main()
Ejemplo n.º 14
0
import logging

logging.basicConfig(format='%(process)d-%(levelname)s-%(message)s')

# input args
parser = argparse.ArgumentParser()
parser.add_argument("--output_path", help="path to save metadata to", action="store", required=True)
parser.add_argument("--video_metadata_path", help="path to video metadata", action="store", required=True)

args = parser.parse_args()

output_data_path = args.output_path
video_metadata_path = args.video_metadata_path


# NOTE(review): assumes the JSON maps some key to lists of video records with
# "author_names", "duration", "youtube_id" and "keywords" fields — confirm
# against the producer of this file.
video_metadata = tools.load_json(video_metadata_path)

# Frequency of each author across all video records, most common first.
authors_distribution = Counter(chain.from_iterable(map(lambda x: x["author_names"],
                                                       chain.from_iterable(video_metadata.values())))).most_common()

# Frequency of each duration value.
duration_distribution = Counter(map(lambda x: x["duration"],
                                    chain.from_iterable(video_metadata.values()))).most_common()

# Frequency of each YouTube id (values > 1 indicate duplicates).
youtube_id_distribution = Counter(map(lambda x: x["youtube_id"],
                                      chain.from_iterable(video_metadata.values()))).most_common()

# Frequency of each comma-separated keyword.
keywords_distribution = Counter(chain.from_iterable(map(lambda x: x["keywords"].split(','),
                                                        chain.from_iterable(video_metadata.values())))).most_common()


# NOTE(review): only the authors distribution is persisted here; the other
# three are computed but not saved in this chunk — confirm whether later code
# saves them.
tools.save_json(os.path.join(output_data_path, 'authors_distribution.json'), authors_distribution)
Ejemplo n.º 15
0
def main():
    """Run ZSL/GZSL evaluation for the dataset selected in a params JSON file.

    Loads dataset-specific splits and metadata (CUB / AWA2 / SUN), builds the
    test dataloaders, then launches zero-shot and/or generalized zero-shot
    evaluation depending on the "zsl_test" / "gzsl_test" flags in params.
    """
    parser = argparse.ArgumentParser(description='JSON file')
    parser.add_argument("--path",
                        dest="json_path",
                        type=str,
                        help='path to json file. defaults to params.json',
                        default="params.json")
    args = parser.parse_args()
    print(args.json_path)
    params = load_json(args.json_path)

    # Standard ImageNet normalization constants.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])

    # Each branch imports its dataset config, loads the attribute splits and
    # image metadata (.mat files), and selects the matching Dataset class.
    if params["dataset"] == "CUB":
        preprocess = transforms.Compose(
            [transforms.Resize(256),
             transforms.ToTensor(), normalize])
        from configs.config_CUB import MAIN_DATAPATH, TEST_DATAPATH
        att_split = sio.loadmat(params["CUB_paths"]["att_split"])
        root = params["CUB_paths"]["root"]
        metaData = sio.loadmat(params["CUB_paths"]["metaData"])
        print("CUB Dataset chosen.")
        dataloader_placeholder = CUBDataset

    elif params["dataset"] == "AWA2":
        # NOTE(review): AWA2 preprocessing skips `normalize`, unlike CUB/SUN —
        # presumably intentional; confirm against the training setup.
        preprocess = transforms.Compose([
            transforms.Resize(256),
            transforms.ToTensor(),
        ])
        from configs.config_AWA2 import MAIN_DATAPATH, TEST_DATAPATH
        att_split = sio.loadmat(params["AWA2_paths"]["att_split"])
        root = params["AWA2_paths"]["root"]
        metaData = sio.loadmat(params["AWA2_paths"]["metaData"])
        dataloader_placeholder = AWADataset
        print("AWA2 Dataset chosen.")

    elif params["dataset"] == "SUN":
        preprocess = transforms.Compose(
            [transforms.Resize(256),
             transforms.ToTensor(), normalize])
        from configs.config_SUN import MAIN_DATAPATH, TEST_DATAPATH
        att_split = sio.loadmat(params["SUN_paths"]["att_split"])
        root = params["SUN_paths"]["root"]
        metaData = sio.loadmat(params["SUN_paths"]["metaData"])
        print("SUN Dataset chosen.")
        dataloader_placeholder = SunDataset

    else:
        print("Invalid dataset chosen. ")
        sys.exit()

    # Class attribute vectors and the train/test label splits.
    all_class_vector = load_data(MAIN_DATAPATH + 'all_class_vec.mat',
                                 "all_class_vec")
    train_val_labels = load_data(TEST_DATAPATH + 'trainval_labels.mat',
                                 'trainval_labels')
    unseen_labels = load_data(TEST_DATAPATH + 'test_unseen_labels.mat',
                              'test_unseen_labels')

    seenClassIndices = np.unique(train_val_labels)
    unseenClassIndices = np.unique(unseen_labels)

    # Attribute vectors moved to the GPU for evaluation.
    unseenVectors = torch.from_numpy(
        all_class_vector[unseenClassIndices, :]).float().cuda()
    allVectors = torch.from_numpy(all_class_vector).float().cuda()

    trainval_indexes = att_split["trainval_loc"]
    test_unseen_indexes = att_split["test_unseen_loc"]
    test_seen_indexes = att_split["test_seen_loc"]

    files = metaData["image_files"]
    labels = metaData["labels"]

    # Three dataloaders: unseen classes in ZSL mode, unseen and seen classes
    # for the generalized (GZSL) evaluation.
    dataloader_zsl = DataLoader(dataloader_placeholder(test_unseen_indexes,
                                                       files,
                                                       labels,
                                                       root,
                                                       zsl=True,
                                                       transform=preprocess),
                                batch_size=1,
                                shuffle=params["shuffle_dataset"],
                                num_workers=params["num_workers"],
                                pin_memory=params["pin_memory"])

    dataloader_unseen = DataLoader(dataloader_placeholder(
        test_unseen_indexes, files, labels, root, transform=preprocess),
                                   batch_size=1,
                                   shuffle=params["shuffle_dataset"],
                                   num_workers=params["num_workers"],
                                   pin_memory=params["pin_memory"])
    dataloader_seen = DataLoader(dataloader_placeholder(test_seen_indexes,
                                                        files,
                                                        labels,
                                                        root,
                                                        transform=preprocess),
                                 batch_size=1,
                                 shuffle=params["shuffle_dataset"],
                                 num_workers=params["num_workers"],
                                 pin_memory=params["pin_memory"])

    criterion = torch.nn.CrossEntropyLoss(reduction='sum')

    if params["zsl_test"]:
        zsl_launch(dataloader_zsl, unseenVectors, criterion, params)

    print("-------" * 20)
    if params["gzsl_test"]:
        gzsl_launch(dataloader_seen, dataloader_unseen, allVectors, criterion,
                    params)
0
def main():
    """Entry point of the sgtk-menu launcher for sway/i3.

    Acquires a single-instance lock, parses CLI options, loads the
    click-count cache, the localized category names and all .desktop
    entries, builds the overlay window and menu, and finally starts the
    GTK main loop, opening the menu after a short delay.
    """
    # exit if already running, thanks to Slava V at https://stackoverflow.com/a/384493/4040598
    pid_file = os.path.join(tempfile.gettempdir(), 'sgtk-menu.pid')
    fp = open(pid_file, 'w')
    try:
        # Non-blocking exclusive lock: if another instance already holds
        # it, lockf raises immediately and we exit quietly.
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        sys.exit(0)

    # --- command-line options -------------------------------------------
    global appendix_file
    parser = argparse.ArgumentParser(description="GTK menu for sway and i3")
    parser.add_argument("-b",
                        "--bottom",
                        action="store_true",
                        help="display menu at the bottom")
    # -f and -fn are mutually exclusive ways to prepend most-used items.
    favourites = parser.add_mutually_exclusive_group()
    favourites.add_argument("-f",
                            "--favourites",
                            action="store_true",
                            help="prepend 5 most used items")
    favourites.add_argument('-fn',
                            type=int,
                            help="prepend <FN> most used items")
    # -a and -af are mutually exclusive ways to append a custom menu.
    appenxid = parser.add_mutually_exclusive_group()
    appenxid.add_argument(
        "-a",
        "--append",
        action="store_true",
        help="append custom menu from {}".format(appendix_file))
    appenxid.add_argument("-af",
                          type=str,
                          help="append custom menu from {}".format(
                              os.path.join(config_dir, '<AF>')))
    parser.add_argument("-l",
                        type=str,
                        help="force language (e.g. \"de\" for German)")
    parser.add_argument("-s",
                        type=int,
                        default=20,
                        help="menu icon size (min: 16, max: 48, default: 20)")
    parser.add_argument(
        "-w",
        type=int,
        help="menu width in px (integer, default: screen width / 8)")
    parser.add_argument("-d",
                        type=int,
                        default=100,
                        help="menu delay in milliseconds (default: 100)")
    parser.add_argument(
        "-o",
        type=float,
        default=0.3,
        help="overlay opacity (min: 0.0, max: 1.0, default: 0.3)")
    parser.add_argument("-t",
                        type=int,
                        default=30,
                        help="sway submenu lines limit (default: 30)")
    global args
    args = parser.parse_args()
    # Clamp the icon size to the supported 16..48 range.
    if args.s < 16:
        args.s = 16
    elif args.s > 48:
        args.s = 48

    # Create default appendix file if not found
    if not os.path.isfile(appendix_file):
        save_default_appendix(appendix_file)

    # Replace appendix file name with custom - if any
    # NOTE(review): this uses config_dirs()[0] while the -af help text
    # above uses config_dir — confirm both resolve to the same directory.
    if args.af:
        appendix_file = os.path.join(config_dirs()[0], args.af)

    # cache stores number of clicks on each item
    global cache
    cache = load_json(cache_file)

    if not cache:
        # First run: persist an empty cache so later saves can update it.
        save_json(cache, cache_file)
    global sorted_cache
    # Most-clicked entries first; used for the "favourites" section.
    sorted_cache = sorted(cache.items(), reverse=True, key=lambda x: x[1])

    # --- localization ----------------------------------------------------
    global locale
    locale = get_locale_string(args.l)
    category_names_dictionary = localized_category_names(locale)

    # replace additional category names with main ones
    # NOTE(review): `localized_names_dictionary` is not defined anywhere
    # visible in this function, so this loop likely raises NameError on
    # every iteration, silently hidden by the bare except — confirm intent.
    for name in category_names:
        main_category_name = additional_to_main(name)
        try:
            localized_names_dictionary[
                main_category_name] = category_names_dictionary[
                    main_category_name]
        except:
            pass

    # --- GTK styling ------------------------------------------------------
    screen = Gdk.Screen.get_default()
    provider = Gtk.CssProvider()
    style_context = Gtk.StyleContext()
    style_context.add_provider_for_screen(
        screen, provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)

    # find all .desktop entries, create DesktopEntry class instances;
    # DesktopEntry adds itself to the proper List in the class constructor
    list_entries()

    # Overlay window
    global win
    win = MainWindow()
    w, h = display_dimensions()
    win.resize(w, h)
    win.menu = build_menu()

    global menu_items_list
    menu_items_list = win.menu.get_children()

    # Route key events to the incremental search box instead of the menu.
    win.menu.propagate_key_event = False
    win.menu.connect("key-release-event", win.search_items)
    # Let's reserve some width for long entries found with the search box
    if args.w:
        win.menu.set_property("width_request", args.w)
    else:
        win.menu.set_property("width_request",
                              int(win.screen_dimensions[0] / 8))
    win.show_all()

    # Open the menu after the configured delay, then hand over to GTK.
    GLib.timeout_add(args.d, open_menu)
    Gtk.main()
Ejemplo n.º 17
0
    def run(self):
        """Main bot loop: log in to Instagram, then repeatedly pick a random
        hashtag, browse its photo grid and follow/like posts until a random
        per-round like limit is reached, pausing between rounds.

        NOTE(review): relies on the Selenium 3 ``find_element_by_*`` API and
        on hard-coded XPaths matching Instagram's old DOM — both are brittle
        and presumably outdated; verify against the targeted page layout.
        """
        like_count = 0
        # hashtags_dict_analysis = {'searched_hashtags': {}} #TODO

        # Forbidden hashtags
        forbidden = tools.load_json(
            os.path.join(os.getcwd(), 'forbidden_hashtags.json'))
        forbidden = [str(i) for i in forbidden['hashtags']]

        driver = webdriver.Chrome()
        driver.maximize_window()
        driver.implicitly_wait(5)

        url = 'https://www.instagram.com/?hl=en'
        driver.get(url)

        # Access to log in page
        driver.find_element_by_xpath(
            '//*[@id="react-root"]/section/main/article/div[2]/div[2]/p/a'
        ).click()
        driver.implicitly_wait(5)
        # Log in task — credentials and hashtag list come from info.json.
        log = tools.load_json(os.path.join(os.getcwd(), 'info.json'))
        tools.login(driver, log)
        driver.implicitly_wait(5)

        # Search bar
        while True:  # outer loop: pick a hashtag at random each round
            like_limit = random.randint(25, self.max_like_per_round)
            hashtag = random.choice(log['search'])
            # hashtags_dict_analysis['searched_hashtags'][hashtag] = {}

            search = driver.find_element_by_css_selector(
                'input[placeholder="Search"]')  # find Search bar
            search.send_keys(hashtag)
            driver.implicitly_wait(5)

            # Click on first link
            driver.find_element_by_xpath(
                '//*[@id="react-root"]/section/nav/div[2]/div/div/div[2]/div[2]/div[2]/div/a[1]'
            ).click()
            driver.implicitly_wait(5)

            # Click on 'Load More'
            try:
                driver.execute_script(
                    "window.scrollTo(0, document.body.scrollHeight);")
                driver.find_element_by_link_text('Load more').click()
            except NoSuchElementException:
                pass

            # Click on first photo
            first_photo_xpath = '//*[@id="react-root"]/section/main/article/div[2]/div[1]/div[1]/div[1]/a'
            first_photo = driver.find_element_by_xpath(first_photo_xpath)
            ActionChains(driver).move_to_element(first_photo).perform(
            )  # scroll back up to the first image of 'Most recent'

            count_row_scroll = 0
            # hashtag_count = 1
            try:
                row = 1
                while True:  # as long as we can keep scrolling
                    count_row_scroll += 1
                    # The grid shows 3 photos per row; visit each column.
                    for col in [1, 2, 3]:
                        col = str(col)
                        image_i_xpath = '//*[@id="react-root"]/section/main/article/div[2]/div[1]/div[' + str(
                            row) + ']/div[' + col + ']/a'
                        image_i = driver.find_element_by_xpath(image_i_xpath)
                        ActionChains(driver).move_to_element(image_i).perform()
                        image_i.click()

                        username_xpath = '/html/body/div[3]/div/div/div[2]/div/article/header/div[2]/div[1]/div/a'
                        username = driver.find_element_by_xpath(
                            username_xpath).get_attribute('title')
                        # Skip accounts that look like shops.
                        if 'shop' in username:  # TODO: rework this heuristic
                            pass
                        else:
                            caption_xpath = '/html/body/div[3]/div/div/div[2]/div/article/div[2]/div[1]/ul/li'
                            caption = driver.find_element_by_xpath(
                                caption_xpath).text

                            # Get hashtags with regex
                            # hashtags_ = re.findall(r"#(\w+)", caption)
                            # hashtags_ = list(set(hashtags_))
                            # any_in = lambda a, b: bool(set(a).intersection(b))  # intersection between 2 lists
                            # if any_in(forbidden, hashtags_):
                            #     pass
                            # else:
                            # hashtags_dict_analysis['searched_hashtags'][hashtag] = hashtags_
                            # hashtag_count += 1
                            # Follow, like
                            try:  # TODO: move this into the else branch above
                                # A 'Following' button means we already follow.
                                if driver.find_element_by_xpath(
                                        "//button[contains(.,'Following')]"):
                                    print(username + 'is already followed')
                            except NoSuchElementException:
                                # Not followed yet: follow, then try to like.
                                driver.find_element_by_xpath(
                                    "//button[contains(.,'Follow')]").click()
                                time.sleep(random.uniform(0.2, 0.8))
                                try:
                                    driver.find_element_by_xpath(
                                        "//span[contains(.,'Like')]").click()
                                    like_count += 1
                                    # hashtag_count += 1
                                except NoSuchElementException:  # if already liked [contains(., 'Unlike)]
                                    pass
                            finally:
                                # Always close the photo dialog before moving on.
                                time.sleep(random.uniform(0.2, 0.8))
                                driver.find_element_by_xpath(
                                    "//button[contains(.,'Close')]").click()
                                driver.implicitly_wait(5)

                    # Scroll down
                    if count_row_scroll == 4:  # one page load = 4 rows loaded
                        driver.execute_script(
                            "window.scrollTo(0, document.body.scrollHeight);")
                        time.sleep(random.uniform(0.3, 0.8))
                        count_row_scroll = 0

                    row += 1

                    if like_count == like_limit:  # pause every like_limit likes
                        # Sleep 20-45 minutes to mimic human pacing.
                        stop_ = random.uniform(20, 45)
                        print(time.localtime(), stop_)
                        time.sleep(60 * stop_)
                        like_limit = random.randint(25,
                                                    self.max_like_per_round)
                        print('Onto the next : %s' % (time.strftime(
                            '%d/%m/20%y_%H:%M:%S', time.localtime())))

            except NoSuchElementException:  # if no more image
                pass  # look for another hashtag in search bar

            # Restart once per day, ~10 hours after the recorded start time.
            if time.localtime()[7] == self.start[7]:  # same day of year
                if time.localtime()[3] == self.start[3] + 10:
                    self.restart()
            else:
                self.restart()
Ejemplo n.º 18
0
# Extract per-video metadata from a Khan Academy topic-tree backup:
# walk the tree's leaf nodes and group a fixed subset of fields by
# YouTube id.
import argparse
import logging
from collections import defaultdict  # was missing: used below for grouping

import tools  # was missing: tools.load_json / tools.get_leafs are used below

# level=INFO is required, otherwise the logging.info call below is
# silently dropped (root logger defaults to WARNING).
logging.basicConfig(format='%(process)d-%(levelname)s-%(message)s',
                    level=logging.INFO)

# input args
parser = argparse.ArgumentParser()
parser.add_argument("--output_path", help="path to save metadata to", action="store", required=True)
parser.add_argument("--topic_tree_backup", help="path to backup of Khan academy topic tree", action="store", required=True)

args = parser.parse_args()

output_data_path = args.output_path
topic_tree_backup = args.topic_tree_backup

topic_tree = tools.load_json(topic_tree_backup)

# Fields copied verbatim from each leaf node into the output records.
metadata_mask = ['author_names', 'creation_date', 'date_added', 'description', 'description_html', 'duration',
                 'image_url', 'ka_url', 'keywords', 'readable_id', 'slug', 'thumbnail_urls', 'title',
                 'translated_description', 'translated_description_html', 'translated_title', 'translated_youtube_id',
                 'translated_youtube_lang', 'youtube_id']

topic_tree_leafs = tools.get_leafs(topic_tree)

logging.info('Gathering video metadata')
# A video may appear under several topics; collect every occurrence per id.
video_metadata = defaultdict(list)
for leaf in topic_tree_leafs:
    video_metadata[leaf['youtube_id']].append({key: leaf[key] for key in metadata_mask})

video_metadata = dict(video_metadata)
Ejemplo n.º 19
0
import docopt
import requests
import os
import logging
from tools import load_json
from pkg_resources import resource_filename
from toolz import partition_all, compose
from functools import partial
from tools import load_json_lines, write_json_lines, post_json, log_stream, key_to_pos

# google location api

# Base URL template; {method} selects the concrete endpoint (textsearch, geocode, ...).
g_map = "https://maps.googleapis.com/maps/api/{method}/json?"
google_key = os.environ['GOOGLE_MAPS_KEY']  # raises KeyError if the env var is unset
# Country -> continent lookup table shipped alongside this package.
continent_map = load_json(resource_filename(__name__,
                                            'country_continent.json'))


def _extract_name_from_affiliations(affiliations):
    for a in affiliations:
        yield a['info']


def get_location(info):
    """Query the Google Places text-search API for ``info['Name']``.

    NOTE(review): the body appears truncated here — it extracts the
    formatted address of the first result but never returns or uses it;
    confirm against the complete source.
    """
    params = {'key': google_key, 'query': info['Name']}
    url = g_map.format(method='place/textsearch')
    resp = requests.get(url=url, params=params)
    resp = resp.json()

    # Only proceed when the API returned at least one match.
    if len(resp["results"]):
        address = resp["results"][0]["formatted_address"]
Ejemplo n.º 20
0
# Plot training loss and validation accuracy of two ResNet18 runs
# (plain vs. label smoothing) from their monitor JSON logs.
import numpy as np
import matplotlib.pyplot as plt
from tools import load_json
plt.switch_backend('agg')  # avoid display issues when plotting over ssh

data1 = load_json('./logs/ResNet18_training_monitor.json')
data2 = load_json('./logs/ResNet18_label_smoothing_training_monitor.json')


def _plot_comparison(metric, ylabel, title, out_path):
    """Plot `metric` for both runs on one figure and save it to `out_path`."""
    # Both curves share the x-axis derived from run 1's epoch count.
    epochs = np.arange(0, len(data1['loss']))
    plt.style.use("ggplot")
    plt.figure()
    plt.plot(epochs, data1[metric], label="ResNet18")
    plt.plot(epochs, data2[metric], label="ResNet18_label_smooth")
    plt.legend()
    plt.xlabel("Epoch #")
    plt.ylabel(ylabel)
    plt.title(title)
    plt.savefig(out_path)
    plt.close()


_plot_comparison('loss', 'loss',
                 f"Training loss [Epoch {len(data1['loss'])}]",
                 './png/training_loss.png')
_plot_comparison('valid_acc', 'accuracy',
                 f"Valid accuracy [Epoch {len(data1['loss'])}]",
                 './png/valid_accuracy.png')
Ejemplo n.º 21
0

def create_aff_body(ids):
    """Build the request payload for an affiliation lookup.

    The query selects the ``Name`` field of the affiliations whose ids
    are given in *ids*.
    """
    aff_query = {
        "type": "Affiliation",
        "id": ids,
        "select": ["Name"],
    }
    return {"path": "/aff", "aff": aff_query}


# Google Maps API endpoint template; {method} selects the concrete endpoint.
g_map = "https://maps.googleapis.com/maps/api/{method}/json?"
google_key = os.environ['GOOGLE_MAPS_KEY']  # raises KeyError if the env var is unset
continent_map = load_json('country_continent.json')  # country -> continent lookup table


def get_location(name):
    params = {'key': google_key, 'query': name}
    url = g_map.format(method='place/textsearch')
    resp = requests.get(url=url, params=params)
    resp = resp.json()

    if len(resp["results"]):
        address = resp["results"][0]["formatted_address"]

        params = {'key': google_key, 'address': address}
        url = g_map.format(method='geocode')
        resp = requests.get(url=url, params=params)
        resp = resp.json()