Example #1
    def gen(self, batch_size):

        # TF1-style input pipeline: batch the dataset, then pull tensors
        # from a one-shot iterator inside a Session.
        dataset = self.dataset.batch(batch_size)
        iterator = dataset.make_one_shot_iterator()

        images, labels, comments = iterator.get_next()
        # Run the input pipeline on CPU only, with a small thread pool.
        with tf.Session(
                config=tf.ConfigProto(allow_soft_placement=True,
                                      device_count={'GPU': 0},
                                      intra_op_parallelism_threads=4,
                                      inter_op_parallelism_threads=4)) as sess:

            while True:
                try:
                    raw_images, raw_labels, raw_comments = sess.run(
                        [images, labels, comments])
                    for img, lex, comment in zip(raw_images, raw_labels,
                                                 raw_comments):

                        # Keep only images no wider than max_width.
                        if self.max_width and (Image.open(IO(img)).size[0] <=
                                               self.max_width):
                            word = self.convert_lex(lex)

                            # Group samples into buckets; emit a batch once a
                            # bucket has accumulated batch_size samples.
                            bucket_size = self.bucket_data.append(
                                img, word, lex, comment)
                            if bucket_size >= batch_size:
                                bucket = self.bucket_data.flush_out(
                                    self.bucket_specs, go_shift=1)
                                yield bucket

                except tf.errors.OutOfRangeError:
                    break

        self.clear()
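
These snippets come without their module-level imports. A minimal preamble that would make Example #1 (and the other TensorFlow generators below) runnable might look like this; treating `IO` as an alias for `io.BytesIO` is an assumption, since the alias is never shown:

    import random

    import numpy as np
    import tensorflow as tf          # TF 1.x API: tf.Session, make_one_shot_iterator
    from io import BytesIO as IO     # assumption: IO aliases io.BytesIO
    from PIL import Image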
Example #2
 def csv_iter():
     # `table` and `converter` come from the enclosing scope.
     rows = iter(table)
     # Text buffer: csv.writer writes str, so IO must be io.StringIO here.
     fo = IO()
     csv_writer = csv.writer(fo)
     csv_writer.writerow(converter.header2())
     while True:
         try:
             # Write up to 1000 rows per chunk before flushing the buffer.
             for _ in range(1000):
                 row = next(rows)
                 csv_writer.writerow(row)
         except StopIteration:
             # Emit whatever is left in the buffer, then stop.
             fo.seek(0)
             yield fo.read().encode('utf-8')
             break
         # Emit the finished chunk and reset the buffer for the next one.
         fo.seek(0)
         data = fo.read().encode('utf-8')
         fo.seek(0)
         fo.truncate()
         yield data
     if converter.errors:
         # Keep yields consistently bytes: encode the error report too.
         yield b'The following errors were found at unspecified points in processing:\n'
         for error in converter.errors:
             yield (str(error) + '\n').encode('utf-8')
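
A hedged sketch of driving this chunked-CSV generator; the `table` and `converter` stand-ins below are hypothetical, since only `csv_iter` itself appears in the original:

    import csv
    from io import StringIO as IO  # text buffer, since csv.writer writes str

    class DummyConverter:          # hypothetical stand-in
        errors = []
        def header2(self):
            return ['id', 'name']

    table = [[1, 'ada'], [2, 'grace']]
    converter = DummyConverter()

    # csv_iter (defined above at module scope) picks up table/converter and
    # yields the CSV in encoded chunks, e.g. for a streaming HTTP response.
    for chunk in csv_iter():
        print(chunk.decode('utf-8'), end='')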
Example #3
    def gen(self, batch_size):
        self.dataset = self.dataset.batch(batch_size)
        for batch in self.dataset:
            image, label, comment = batch

            # Decode the raw image bytes with PIL and flip channels RGB -> BGR.
            image = [np.array(Image.open(IO(i)))[..., ::-1] for i in image.numpy()]
            label = [t.decode('utf-8') for t in label.numpy()]
            comment = comment.numpy()

            converted_label, length = self.text_converter.encode(label)
            converted_image = [self.image_converter(im) for im in image]
            yield converted_image, converted_label, length
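
Unlike Examples #1 and #6, this version relies on TensorFlow 2.x eager execution, where a tf.data.Dataset is directly iterable and each tensor exposes .numpy(). A tiny self-contained illustration of that iteration style:

    import tensorflow as tf  # TF 2.x: no Session or iterator needed

    ds = tf.data.Dataset.from_tensor_slices(([b'a', b'b', b'c'], [0, 1, 2]))
    for x, y in ds.batch(2):
        print(x.numpy(), y.numpy())  # eager tensors convert straight to NumPy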
Example #4
	def __getitem__(self, index):
		"""
		:param index: index of the sample to fetch
		:return: one training sample, as a dict
		"""

		image_id = self.list_IDs[index]  # image path is the image ID

		if not self.load_pickle:
			label = self.labels[image_id]
			original_image = self._pil_loader(os.path.join(self.root_folder, image_id))
		else:
			original_image = Image.open(IO(self.pickle_data[image_id]['data']))
			label = self.pickle_data[image_id]['label']
			if isinstance(label, (bytes, bytearray)):
				label = label.decode('ascii')

		label = label.rstrip().lower()
		image = self.input_transform(original_image)

		ltr_targets, rtl_targets = self.convert_tokens(label)

		ltr_target_y = self.one_hot_targets(ltr_targets)
		rtl_target_y = self.one_hot_targets(rtl_targets)

		mask = self.make_mask(ltr_targets)

		sample = {
			'images': image,
			'ltr_targets': ltr_targets,
			'rtl_targets': rtl_targets,
			'ltr_targets_y': ltr_target_y,
			'rtl_targets_y': rtl_target_y,
			'labels': label,
			'ids': image_id,
			'masks': mask,
		}

		if self.validation_set:
			sample['original_image'] = np.array(original_image)
		return sample
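
This reads like a PyTorch Dataset.__getitem__. A hedged sketch of consuming it with a DataLoader; the class name and constructor arguments are assumptions, since only __getitem__ is shown:

    from torch.utils.data import DataLoader

    # Hypothetical constructor; the real signature is not shown above.
    dataset = OcrDataset(root_folder='data/', load_pickle=False)
    loader = DataLoader(dataset, batch_size=32, shuffle=True)

    for sample in loader:
        images = sample['images']      # batched by the default collate_fn
        ltr = sample['ltr_targets']
        # forward pass / loss computation would go here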
Example #5
    def gen(self, batch_size):
        # Half of each batch comes from the real dataset; the other half is
        # synthesized by fake_generator below.
        half_batch_size = batch_size // 2
        self.dataset = self.dataset.batch(half_batch_size)

        for batch in self.dataset:
            image, label, comment = batch

            image = [np.array(Image.open(IO(i)))[..., ::-1] for i in image.numpy()]
            label = [t.decode('utf-8') for t in label.numpy()]
            comment = comment.numpy()

            # Top up the batch with synthetic samples, then shuffle real and
            # fake together so they are well mixed.
            fake_image, fake_label = self.fake_generator.gen(half_batch_size)

            image += fake_image
            label += fake_label

            c = list(zip(image, label))
            random.shuffle(c)
            image, label = zip(*c)

            converted_label, length = self.text_converter.encode(label)
            converted_image = [self.image_converter(im) for im in image]
            yield converted_image, converted_label, length
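
Mixing each batch half-and-half with synthetic samples is a common augmentation scheme for text recognition. One detail worth noting: zip(*c) returns tuples, so image and label leave the shuffle as tuples rather than lists, and the downstream converters must accept either.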
Example #6
    def gen(self, batch_size):

        dataset = self.dataset.batch(batch_size)
        iterator = dataset.make_one_shot_iterator()

        images, labels, comments = iterator.get_next()
        with tf.Session(config=tf.ConfigProto(
                allow_soft_placement=True)) as sess:

            while True:
                try:
                    raw_images, raw_labels, raw_comments = sess.run(
                        [images, labels, comments])
                    for img, lex, comment in zip(raw_images, raw_labels,
                                                 raw_comments):

                        if self.max_width and (Image.open(IO(img)).size[0] <=
                                               self.max_width):
                            #print("LEX:",lex)
                            #print("LENLEX:", len(lex))
                            if len(lex) < self.max_label:
                                word = self.convert_lex(lex)
                                #print("WORD:", word)
                                #print("LENGTH:", len(word))

                                bucket_size = self.bucket_data.append(
                                    img, word, lex, comment)
                                if bucket_size >= batch_size:
                                    bucket = self.bucket_data.flush_out(
                                        self.bucket_specs, go_shift=1)
                                    yield bucket

                except tf.errors.OutOfRangeError:
                    break

        self.clear()
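
Apart from the session configuration, this differs from Example #1 only in the extra len(lex) < self.max_label guard, which drops samples whose label would exceed the model's maximum output length.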
Example #7
 def __init__(self, on_stream, off_stream):
     # Capture everything into an in-memory buffer; `stream` points at
     # whichever target is currently active.
     self.capture_stream = IO()
     self.on_stream = on_stream
     self.off_stream = off_stream
     self.stream = on_stream
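
A minimal sketch of the switchable stream this constructor suggests; the write/on/off methods are assumed, since only __init__ appears in the original:

    import sys
    from io import StringIO as IO

    class SwitchableStream:
        def __init__(self, on_stream, off_stream):
            self.capture_stream = IO()
            self.on_stream = on_stream
            self.off_stream = off_stream
            self.stream = on_stream

        def write(self, text):               # assumed: delegate to the active target
            self.capture_stream.write(text)  # always keep a full capture
            self.stream.write(text)

        def off(self):                       # assumed toggles
            self.stream = self.off_stream

        def on(self):
            self.stream = self.on_stream

    stream = SwitchableStream(sys.stdout, IO())
    stream.write('visible\n')
    stream.off()
    stream.write('silenced\n')  # lands in off_stream, not stdout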
Example #8
 def makefile(self, *args, **kwargs):
     # Mimic socket.makefile(): expose the stored request bytes as a
     # file-like object.
     return IO(self.request)
Example #9
 def makefile(self, *args, **kwargs):
     # Same fake-socket pattern, replaying the payload stored on self.path.
     return IO(self.path)
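
Both makefile overrides look like the fake-socket trick for feeding canned bytes to http.server.BaseHTTPRequestHandler (or a similar parser) without a network connection; a small sketch under that assumption:

    from io import BytesIO as IO

    class FakeSocket:
        # Stand-in for a socket: makefile() replays stored request bytes.
        def __init__(self, request):
            self.request = request

        def makefile(self, *args, **kwargs):
            return IO(self.request)

    sock = FakeSocket(b'GET / HTTP/1.1\r\nHost: example.com\r\n\r\n')
    fp = sock.makefile('rb')
    print(fp.readline())  # b'GET / HTTP/1.1\r\n'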