Example #1
    def __init__(
        self,
        space: gym.spaces.Dict,
        names: Iterable[str],
        *,
        embedding_size: int,
        layers: List[int],
    ):
        super().__init__()
        self.space = space

        # one shared vocabulary covering both the grid and item sub-spaces
        num_embeddings = max(
            space['grid'].high.max() + 1,
            space['item'].high.max() + 1,
        )
        self.embedding = EmbeddingRepresentation(num_embeddings,
                                                 embedding_size)
        gv_models = [self._make_gv_model(name) for name in names]
        self.cat_representation = CatRepresentation(gv_models)
        self.fc_model: nn.Module

        # stack Linear/ReLU blocks when hidden layers are requested
        if len(layers) > 0:
            dims = [self.cat_representation.dim] + layers
            linear_modules = [
                make_module('linear', 'relu', in_dim, out_dim)
                for in_dim, out_dim in mitt.pairwise(dims)
            ]
            relu_modules = [nn.ReLU() for _ in linear_modules]
            modules = mitt.interleave(linear_modules, relu_modules)
            self.fc_model = nn.Sequential(*modules)
            self._dim = dims[-1]

        else:
            self.fc_model = nn.Identity()
            self._dim = self.cat_representation.dim
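
A minimal sketch of the pattern above, assuming more_itertools is installed as mitt; the layer sizes are made up for illustration. interleave alternates its inputs starting with the first iterable, so the stack comes out as Linear, ReLU, Linear, ReLU:

import more_itertools as mitt
import torch.nn as nn

dims = [16, 32, 8]  # illustrative layer sizes
linear_modules = [nn.Linear(i, o) for i, o in mitt.pairwise(dims)]
relu_modules = [nn.ReLU() for _ in linear_modules]

# yields Linear(16, 32), ReLU(), Linear(32, 8), ReLU()
fc_model = nn.Sequential(*mitt.interleave(linear_modules, relu_modules))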
Example #2
    def __str__(self):
        spacer = "++---+---+---++---+---+---++---+---+---++"
        size = len(self._board[0])
        # heavy '=' rule after every third row, light '-' rule otherwise
        spacers = (spacer if (i + 1) % 3 else spacer.replace('-', '=')
                   for i in range(size))
        fmt = "|| {} | {} | {} || {} | {} | {} || {} | {} | {} ||"

        formats = (fmt.format(*(cell or ' ' for cell in line))
                   for line in self._board)
        return spacer.replace('-', '=') + '\n' + '\n'.join(
            interleave(formats, spacers))
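
A toy version of the board-printing idea, assuming more_itertools; the rows and rules below are hypothetical stand-ins for the generators above. Interleaving the formatted rows with the rule lines, after printing one opening rule, frames every row:

from more_itertools import interleave

rows = ["| a | b |", "| c | d |"]   # stand-ins for the formatted board lines
rules = ["+---+---+"] * len(rows)   # one rule line per row
print("+---+---+")
print("\n".join(interleave(rows, rules)))
# +---+---+
# | a | b |
# +---+---+
# | c | d |
# +---+---+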
Example #3
    def validation_epoch_end(self, outputs):
        # pair each input image with its reconstruction: x0, recon0, x1, ...
        recon_images = list(
            interleave(
                filter(lambda x: x is not None,
                       [output.get("x") for output in outputs]),
                filter(lambda x: x is not None,
                       [output.get("recon") for output in outputs]),
            ))
        recon_grid = torchvision.utils.make_grid(recon_images,
                                                 nrow=4,
                                                 normalize=False)
        self.logger.experiment.add_image("reconstruction", recon_grid,
                                         self.current_epoch)
        avg_loss = torch.stack([x["val_loss"] for x in outputs]).mean()
        tensorboard_logs = {"loss/val_loss": avg_loss}
        return {"val_loss": avg_loss, "log": tensorboard_logs}
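
What the interleave above produces, sketched with placeholder tensors standing in for the validation outputs; with nrow=4, make_grid then lays out input/reconstruction pairs side by side:

import torch
from more_itertools import interleave

xs = [torch.zeros(3, 8, 8) for _ in range(2)]      # stand-ins for inputs
recons = [torch.ones(3, 8, 8) for _ in range(2)]   # stand-ins for reconstructions
images = list(interleave(xs, recons))  # x0, recon0, x1, recon1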
Example #4
    def test_keeps_joined_pickups(self):
        # join pickups
        for pickup in self.series.pickup_dates.all():
            pickup.add_collector(self.member)

        # change series rule to add another day
        today = self.now.astimezone(self.group.timezone).weekday()
        tomorrow = shift_date_in_local_time(
            self.now, relativedelta(days=1), self.group.timezone
        ).astimezone(self.group.timezone).weekday()
        recurrence = rrule.rrule(
            freq=rrule.WEEKLY,
            byweekday=[
                today,
                tomorrow,
            ],
        )
        series_url = '/api/pickup-date-series/{}/'.format(self.series.id)
        self.client.force_login(user=self.member)
        response = self.client.patch(series_url, {
            'rule': str(recurrence),
        })
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.series.refresh_from_db()

        response = self.client.get('/api/pickup-dates/?series={}'.format(
            self.series.id))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # self.assertEqual([parse(p['date'][0]) for p in response.data['results']], [
        #     shift_date_in_local_time(self.series.start_date, delta, self.group.timezone) for delta in (
        #         relativedelta(days=0),
        #         relativedelta(days=1),
        #         relativedelta(days=7),
        #         relativedelta(days=8),
        #         relativedelta(days=14),
        #         relativedelta(days=15),
        #         relativedelta(days=21),
        #         relativedelta(days=22),
        #     )
        # ])
        self.assertEqual(
            [p['collectors'] for p in response.data['results']],
            list(
                interleave(
                    [[self.member.id] for _ in range(4)],
                    [[] for _ in range(4)],
                )),
        )
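
The expected value in the final assertion is built the same way; a standalone sketch, with a plain int standing in for the member id:

from more_itertools import interleave

expected = list(interleave([[1] for _ in range(4)], [[] for _ in range(4)]))
# [[1], [], [1], [], [1], [], [1], []]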
Example #5
    def __init__(self,
                 in_features: int,
                 out_features: Optional[int] = None,
                 hidden_features: Optional[int] = None,
                 bias: bool = False,
                 window_sizes: Tuple[int, ...] = (1, 5, 1),
                 negative_slope: float = 0.,
                 inplace: bool = True) -> None:

        if out_features is None:
            out_features = in_features
        if hidden_features is None:
            hidden_features = max(in_features, out_features)

        self.in_features = in_features
        self.hidden_features = hidden_features
        self.out_features = out_features
        self.num_layers = len(window_sizes)

        in_dims = chain([in_features],
                        [hidden_features for _ in window_sizes[:-1]])
        out_dims = chain([hidden_features for _ in window_sizes[:-1]],
                         [out_features])

        super(FeedForwardLayer, self).__init__(*interleave(
            [
                nn.LeakyReLU(negative_slope=negative_slope, inplace=inplace)
                for _ in window_sizes
            ],
            [
                nn.Conv1d(in_channels=in_dim,
                          out_channels=out_dim,
                          bias=bias,
                          kernel_size=window_size,
                          padding=window_size // 2,
                          stride=1)
                for in_dim, out_dim, window_size in zip(
                    in_dims, out_dims, window_sizes)
            ],
        ))

        self.reset_parameters()
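
A note on ordering, sketched with string stand-ins: interleave starts with its first argument, so the Sequential built above begins with a LeakyReLU before the first convolution:

from more_itertools import interleave

acts = ["act0", "act1", "act2"]      # stand-ins for the LeakyReLU modules
convs = ["conv0", "conv1", "conv2"]  # stand-ins for the Conv1d modules
print(list(interleave(acts, convs)))
# ['act0', 'conv0', 'act1', 'conv1', 'act2', 'conv2']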
Example #6
if args.fasta_input:
    fasta_file = args.fasta_input

if fasta_file:
    if not args.bits or not args.chopper_bin:
        print(
            "Must specify --bits and --chopper-bin to perform hyperloglog experiments."
        )
        quit()

    print("Building HyperLogLog sketches...")

    proc = subprocess.run([
        args.chopper_bin / "measure_hyperloglog", "-i", fasta_file, "-o",
        args.tsv_file, "-k", args.kmer_size
    ] + list(interleave(["-b"] * len(args.bits), args.bits)),
                          capture_output=True,
                          encoding="utf-8")

    if proc.returncode != 0:
        message = f"stdout:\n{proc.stdout}\nstderr:\n{proc.stderr}\n"
        print(
            f"measure_hyperloglog failed with the following output:\n{message}"
        )
        quit()

    else:
        print(f"measure_hyperloglog stdout:\n{proc.stdout}\n")
#################################### data analysis ####################################

print("Doing the evaluation...")
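
The flag-building trick above, in isolation; the bit values here are illustrative, and argparse is assumed to deliver them as strings:

from more_itertools import interleave

bits = ["8", "12", "16"]
flags = list(interleave(["-b"] * len(bits), bits))
# ['-b', '8', '-b', '12', '-b', '16']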
Example #7
                                 "format": "yyyy-MM-dd'T'HH:mm:ss.SSSSSS"},
                        "long": {"type": "long"},
                        "float": {"type": "float"},
                        "null": {"type": "boolean", "null_value": False}
                    }
                }
            }
        )))

    for chunk in tqdm(chunked(parameter_files, 1000), desc="Uploading..."):
        parameters = [reduce(assign, [
            *(read_json(join(cwd, f['path'])) or []), {"dir": f['dir']}
        ]) for f in chunk]
        actions = [{"index": dict(_id=p['dir'])} for p in parameters]
        documents = [dict(index=[dict(key=k, **v) for k, v in typify(dot_flatten(p)).items()], **p)
                     for p in parameters]

        # documents[0]

        # https://stackoverflow.com/questions/20288770/how-to-use-bulk-api-to-store-the-keywords-in-es-by-using-python
        response = es.bulk(index='ml-dash', body=interleave(actions, documents))

        if response['errors']:
            for i, item in enumerate(response['items']):
                if item['index']['status'] >= 300:
                    print(item['index'])
                    print(documents[i])
                    break

    cprint('finished', 'green')
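
How interleave pairs bulk actions with documents, sketched without a live Elasticsearch cluster; the payloads are made up. The bulk API expects action metadata and document source to alternate, which is exactly what interleave produces:

from more_itertools import interleave

actions = [{"index": {"_id": "run-1"}}, {"index": {"_id": "run-2"}}]
documents = [{"lr": 0.01}, {"lr": 0.1}]
body = list(interleave(actions, documents))
# [{'index': {'_id': 'run-1'}}, {'lr': 0.01},
#  {'index': {'_id': 'run-2'}}, {'lr': 0.1}]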
Example #8
    for chunk in tqdm(chunked(parameter_files, 1000), desc="Uploading..."):
        parameters = [
            reduce(assign, [
                *(read_pickle_for_json(join(cwd, f['path'])) or []), {
                    "dir": f['dir']
                }
            ]) for f in chunk
        ]
        actions = [{"index": dict(_id=p['dir'])} for p in parameters]
        documents = [
            dict(index=[
                dict(key=k, **v) for k, v in typify(dot_flatten(p)).items()
            ],
                 **p) for p in parameters
        ]

        # documents[0]

        # https://stackoverflow.com/questions/20288770/how-to-use-bulk-api-to-store-the-keywords-in-es-by-using-python
        response = es.bulk(index='ml-dash',
                           body=interleave(actions, documents))

        if response['errors']:
            for i, item in enumerate(response['items']):
                if item['index']['status'] >= 300:
                    print(item['index'])
                    print(documents[i])
                    break

    cprint('finished', 'green')
Example #9
    def replace_text(self, pattern, repl, **kwargs):
        # flatten keys and values into one alternating stream, run the
        # replacement over it, then split the stream back into the two lists
        objs = interleave(self._keys, self._values)
        objs = _replace_list(objs, pattern, repl, **kwargs)
        self._keys, self._values = (map(list, zip(*chunked(objs, 2)))
                                    if objs else ([], []))
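
The round trip this method relies on, assuming more_itertools: interleave flattens keys and values into one alternating stream, and chunked(..., 2) splits it back into pairs:

from more_itertools import chunked, interleave

keys, values = ["a", "b"], [1, 2]
flat = list(interleave(keys, values))   # ['a', 1, 'b', 2]
keys2, values2 = map(list, zip(*chunked(flat, 2)))
assert (keys2, values2) == (keys, values)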