def test_export_import():
    """Exported scale dicts round-trip through scale_factory back to equal scales."""
    log_scale = LogScale(low=-1, high=1, log_base=2)
    exported = log_scale.export()
    # width is high - low; class tag is what scale_factory dispatches on
    assert exported["width"] == 2
    assert exported["class"] == "LogScale"
    assert scale_factory(log_scale.export()) == log_scale

    linear_scale = Scale(low=1, high=10000)
    assert scale_factory(linear_scale.export()) == linear_scale

    linear_date_scale = TimeScale(low=631152000, high=946684800)
    assert scale_factory(linear_date_scale.export()) == linear_date_scale
def test_serialization():
    """Scales with equal parameters hash equal; any differing parameter or class changes the hash."""
    # Plain Scale: identical bounds agree, differing bounds disagree.
    assert hash(Scale(0, 100)) == hash(Scale(0, 100))
    assert hash(Scale(0, 100)) != hash(Scale(100, 200))

    # LogScale: the log base participates in the hash too.
    assert hash(LogScale(0, 100, 10)) == hash(LogScale(0, 100, 10))
    assert hash(LogScale(0, 100, 10)) != hash(LogScale(0, 100, 100))

    # TimeScale: same timestamps agree, shifted low bound disagrees.
    assert hash(TimeScale(946684800, 1592914415)) == hash(
        TimeScale(946684800, 1592914415)
    )
    assert hash(TimeScale(631152000, 1592914415)) != hash(
        TimeScale(946684800, 1592914415)
    )

    # Different scale classes must not collide with one another.
    assert (
        hash(LogScale(0, 100, 1))
        != hash(Scale(0, 100))
        != hash(TimeScale(631152000, 946684800))
    )
def __init__(
    self,
    id: int,
    metaculus: Any,
    data: Dict,
    name=None,
):
    """Initialize the question, then build a TimeScale from the question's date bounds.

    The parent constructor populates ``self.possibilities``; the scale's
    min/max date strings are converted to Unix timestamps before the
    TimeScale is constructed.
    """
    super().__init__(id, metaculus, data, name)
    scale_bounds = self.possibilities["scale"]
    low = self.date_to_timestamp(scale_bounds["min"])
    high = self.date_to_timestamp(scale_bounds["max"])
    self.scale = TimeScale(low, high)
1.6001953904517954, 2.0558220666186604, 2.5114487427855257, 2.9670754189523905, ] ) densities = np.array( [ 0.05020944540593859, 0.3902426887736647, 0.5887675161478794, 0.19516571803813396, 0.33712516238248535, 0.4151935926066581, 0.16147625748938946, 0.03650993407810862, ] ) return {"xs": xs, "densities": densities} scales_to_test = [ Scale(0, 1), Scale(0, 10000), Scale(-1, 1), LogScale(0.01, 100, 10), LogScale(0.01, 1028, 2), TimeScale(631152000, 946684800), TimeScale(2000, 2051222400), ]