Ejemplo n.º 1
0
    def __init__(self, annotations_file, words_count, vocab=None):
        """Load a COCO-style captions dataset, downloading the archive first
        if *annotations_file* is missing.

        Args:
            annotations_file: Path to the captions JSON file.
            words_count: Vocabulary size passed to ``Vocab`` when one is built.
            vocab: Optional pre-built vocabulary; when ``None`` a new ``Vocab``
                is constructed from the tokens of every caption.
        """
        if not os.path.isfile(annotations_file):
            archive_path = os.path.join(config.base_path, 'captions_train-val2014.zip')
            ensure_file(archive_path, 'http://msvocds.blob.core.windows.net/annotations-1-0-3/captions_train-val2014.zip')
            # Use a context manager so the archive handle is always closed
            # (the original leaked the open ZipFile).
            with zipfile.ZipFile(archive_path) as z:
                # Flatten the archive layout: strip directory components so
                # every member is extracted directly into config.coco_path.
                infolist = []
                for zipinfo in z.infolist():
                    zipinfo.filename = os.path.basename(zipinfo.filename)
                    infolist.append(zipinfo)
                z.extractall(config.coco_path, infolist)

        with open(annotations_file) as f:
            self.dataset = json.load(f)

        # Index images and annotations by id for O(1) lookup.
        self.images = {image['id']: image for image in self.dataset['images']}

        self.img_to_anns = defaultdict(list)
        self.annotations = {}
        for ann in self.dataset['annotations']:
            self.annotations[ann['id']] = ann
            # append() instead of `+= [ann]`: same result without allocating
            # a throwaway one-element list per annotation.
            self.img_to_anns[ann['image_id']].append(ann)

        if vocab is not None:
            self.vocab = vocab
        else:
            # Build the vocabulary from every caption's lower-cased tokens.
            all_words = []
            for ann in self.load_annotations(self.img_ids()):
                all_words += word_tokenize(ann['caption'].lower())
            self.vocab = Vocab(all_words, words_count)
Ejemplo n.º 2
0
def expand_set_files(bot, persistence, set_data):
    """Yield one metadata dict per sticker in *set_data*.

    Each yielded dict combines the set-level fields (name, title) with the
    sticker's emoji, its file id, and the path/size of the resolved file
    returned by ``ensure_file``.
    """
    # Set-level fields are the same for every sticker; read them once.
    set_name = set_data["name"]
    set_title = set_data["title"]
    for entry in set_data["stickers"]:
        fid = entry["file_id"]
        resolved = ensure_file(bot, persistence, fid)
        yield {
            "set_name": set_name,
            "set_title": set_title,
            "emoji": entry["emoji"],
            "file_id": fid,
            "url": resolved["file_path"],
            "size": resolved["file_size"],
        }
Ejemplo n.º 3
0
    def __init__(self):
        """Load the Inception-5h graph and its ImageNet label list,
        downloading and unpacking the model archive on first use, then open
        a TensorFlow session for inference.
        """
        models_dir = config.base_path
        net_file = os.path.join(models_dir, 'tensorflow_inception_graph.pb')
        synset_file = os.path.join(models_dir, 'imagenet_comp_graph_label_strings.txt')

        ensure_dir(models_dir)
        if not (os.path.isfile(net_file) and os.path.isfile(synset_file)):
            archive_path = os.path.join(models_dir, 'inception5h.zip')
            ensure_file(archive_path, 'https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip')
            # Context manager closes the archive handle (original leaked it).
            with zipfile.ZipFile(archive_path) as z:
                z.extractall(models_dir)

        # One human-readable label per line.
        # NOTE(review): lines keep their trailing '\n' (as in the original);
        # if labels are compared or printed elsewhere, consider rstrip'ing —
        # confirm against callers before changing.
        self.synset = []
        with open(synset_file) as f:
            for line in f:
                self.synset.append(line)

        graph_def = tf.GraphDef()
        # The graph is a binary protobuf: it must be opened in 'rb' mode.
        # Text mode returns str, which ParseFromString rejects on Python 3
        # (and the decode itself can fail on arbitrary bytes).
        with open(net_file, 'rb') as f:
            graph_def.ParseFromString(f.read())
        tf.import_graph_def(graph_def)
        self.session = tf.Session()
Ejemplo n.º 4
0
def workflow_single_ntuple(input_ntp, input_yml, output_suffix, aux_workflows,
                           cpp_template='../postprocess/cpp_templates/rdx.cpp',
                           **kwargs):
    """Run the babymaker postprocessing chain on a single ntuple.

    Resolves the input ntuple, runs each auxiliary workflow on it, builds the
    babymaker command from the YAML spec and C++ template, generates and
    compiles ``baby.cpp``, and finally executes the resulting binary with the
    given output suffix.
    """
    input_ntp = ensure_file(input_ntp)
    print('{}Working on {}...{}'.format(TC.GREEN, input_ntp, TC.END))
    cpp_template = abs_path(cpp_template)

    # Placeholders are filled later, after workflow_bm_cli has had a chance
    # to extend the command.
    cmd = 'babymaker -i {} -o baby.cpp -n {} -t {}'

    # Each auxiliary workflow produces a friend ntuple for the same input.
    friend_ntuples = [wf(input_ntp, **kwargs) for wf in aux_workflows]
    if friend_ntuples:
        cmd = cmd + ' -f ' + ' '.join(friend_ntuples)

    cmd = workflow_bm_cli(cmd, **kwargs).format(
        abs_path(input_yml), input_ntp, cpp_template)

    run_cmd(cmd, **kwargs)
    workflow_compile_cpp('baby.cpp', **kwargs)
    run_cmd('./baby.exe --{}'.format(output_suffix), **kwargs)