コード例 #1
0
ファイル: test_arithmetic.py プロジェクト: funnycrab/pandas
    def test_datetimeindex_sub_timestamp_overflow(self):
        """Subtracting a scalar from a DatetimeIndex near the int64
        nanosecond bounds must raise OverflowError when the result
        cannot be represented, and hit the exact expected value when
        it can — for every timestamp-like variant of the scalar.
        """
        dtimax = pd.to_datetime(['now', pd.Timestamp.max])
        dtimin = pd.to_datetime(['now', pd.Timestamp.min])

        def _variants_of(stamp):
            # Same instant as Timestamp, datetime, datetime64[ns], datetime64[D].
            return [stamp,
                    stamp.to_pydatetime(),
                    stamp.to_datetime64().astype('datetime64[ns]'),
                    stamp.to_datetime64().astype('datetime64[D]')]

        tsneg = Timestamp('1950-01-01')
        tspos = Timestamp('1980-01-01')
        ts_neg_variants = _variants_of(tsneg)
        ts_pos_variants = _variants_of(tspos)

        # max index minus a pre-epoch stamp pushes past Timestamp.max.
        for variant in ts_neg_variants:
            with pytest.raises(OverflowError):
                dtimax - variant

        expected = pd.Timestamp.max.value - tspos.value
        for variant in ts_pos_variants:
            assert (dtimax - variant)[1].value == expected

        expected = pd.Timestamp.min.value - tsneg.value
        for variant in ts_neg_variants:
            assert (dtimin - variant)[1].value == expected

        # min index minus a post-epoch stamp pushes below Timestamp.min.
        for variant in ts_pos_variants:
            with pytest.raises(OverflowError):
                dtimin - variant
コード例 #2
0
ファイル: test_arithmetic.py プロジェクト: zhabzhang/pandas
    def test_datetimeindex_sub_timestamp_overflow(self):
        """DatetimeIndex minus a timestamp-like scalar: overflowing
        combinations raise OverflowError, in-range combinations land on
        the exact expected nanosecond value.
        """
        dtimax = pd.to_datetime(['now', pd.Timestamp.max])
        dtimin = pd.to_datetime(['now', pd.Timestamp.min])

        tsneg = Timestamp('1950-01-01')
        tspos = Timestamp('1980-01-01')
        ts_neg_variants = [
            tsneg,
            tsneg.to_pydatetime(),
            tsneg.to_datetime64().astype('datetime64[ns]'),
            tsneg.to_datetime64().astype('datetime64[D]'),
        ]
        ts_pos_variants = [
            tspos,
            tspos.to_pydatetime(),
            tspos.to_datetime64().astype('datetime64[ns]'),
            tspos.to_datetime64().astype('datetime64[D]'),
        ]

        # In-range subtractions: result pinned against the int64 bound.
        in_range = [
            (dtimax, tspos, ts_pos_variants, pd.Timestamp.max),
            (dtimin, tsneg, ts_neg_variants, pd.Timestamp.min),
        ]
        for index, stamp, variants, bound in in_range:
            expected = bound.value - stamp.value
            for variant in variants:
                assert (index - variant)[1].value == expected

        # Out-of-range subtractions must raise rather than wrap around.
        overflowing = [
            (dtimax, ts_neg_variants),
            (dtimin, ts_pos_variants),
        ]
        for index, variants in overflowing:
            for variant in variants:
                with pytest.raises(OverflowError):
                    index - variant
コード例 #3
0
    async def _request_bars(
        self,
        bar_type: BarType,
        from_datetime: pd.Timestamp,
        to_datetime: pd.Timestamp,
        limit: int,
        correlation_id: UUID4,
    ) -> None:
        """
        Request historical klines from the Binance HTTP market endpoint and
        forward the parsed bars to the data engine.

        Parameters
        ----------
        bar_type : BarType
            The bar type to request; only MINUTE/HOUR/DAY aggregations are
            supported by the Binance klines interval strings.
        from_datetime : pd.Timestamp, optional
            Inclusive start of the window; ``None`` means no lower bound.
        to_datetime : pd.Timestamp, optional
            Inclusive end of the window; ``None`` means no upper bound.
        limit : int
            Maximum number of rows; 0 (or anything above 1000) is clamped
            to Binance's 1000-row cap.
        correlation_id : UUID4
            The correlation identifier passed through to the handler.

        Raises
        ------
        RuntimeError
            If the bar aggregation is not MINUTE, HOUR or DAY.
        """
        # Binance caps klines responses at 1000 rows; 0 means "no limit given".
        if limit == 0 or limit > 1000:
            limit = 1000

        if bar_type.spec.aggregation == BarAggregation.MINUTE:
            resolution = "m"
        elif bar_type.spec.aggregation == BarAggregation.HOUR:
            resolution = "h"
        elif bar_type.spec.aggregation == BarAggregation.DAY:
            resolution = "d"
        else:  # pragma: no cover (design-time error)
            raise RuntimeError(
                f"invalid aggregation type, "
                f"was {BarAggregationParser.to_str_py(bar_type.spec.aggregation)}",
            )

        # Binance expects UNIX epoch milliseconds. ``Timestamp.value`` is epoch
        # nanoseconds, so integer-divide by 1e6. The previous
        # ``to_datetime64() * 1000`` raised TypeError whenever a bound was
        # supplied: numpy forbids multiplying a datetime64 by an integer.
        start_time_ms = from_datetime.value // 1_000_000 if from_datetime is not None else None
        end_time_ms = to_datetime.value // 1_000_000 if to_datetime is not None else None

        data: List[List[Any]] = await self._http_market.klines(
            symbol=bar_type.instrument_id.symbol.value,
            interval=f"{bar_type.spec.step}{resolution}",
            start_time_ms=start_time_ms,
            end_time_ms=end_time_ms,
            limit=limit,
        )

        bars: List[BinanceBar] = [
            parse_bar_http(
                bar_type,
                values=b,
                ts_init=self._clock.timestamp_ns(),
            )
            for b in data
        ]
        # The most recent kline is still forming, so report it separately as a
        # partial bar. Guard against an empty response, which previously
        # crashed with IndexError on ``pop()``.
        partial = bars.pop() if bars else None

        self._handle_bars(bar_type, bars, partial, correlation_id)
コード例 #4
0
ファイル: test_arithmetic.py プロジェクト: jess010/pandas
    def test_timedeltaindex_add_timestamp_nat_masking(self):
        # GH17991 checking for overflow-masking with NaT
        tdinat = pd.to_timedelta(['24658 days 11:15:00', 'NaT'])

        tsneg = Timestamp('1950-01-01')
        ts_neg_variants = [tsneg,
                           tsneg.to_pydatetime(),
                           tsneg.to_datetime64().astype('datetime64[ns]'),
                           tsneg.to_datetime64().astype('datetime64[D]')]

        tspos = Timestamp('1980-01-01')
        ts_pos_variants = [tspos,
                           tspos.to_pydatetime(),
                           tspos.to_datetime64().astype('datetime64[ns]'),
                           tspos.to_datetime64().astype('datetime64[D]')]

        for variant in ts_neg_variants + ts_pos_variants:
            res = tdinat + variant
            assert res[1] is pd.NaT
コード例 #5
0
    def test_timedeltaindex_add_timestamp_nat_masking(self):
        # GH17991 checking for overflow-masking with NaT
        tdinat = pd.to_timedelta(['24658 days 11:15:00', 'NaT'])

        tsneg = Timestamp('1950-01-01')
        ts_neg_variants = [tsneg,
                           tsneg.to_pydatetime(),
                           tsneg.to_datetime64().astype('datetime64[ns]'),
                           tsneg.to_datetime64().astype('datetime64[D]')]

        tspos = Timestamp('1980-01-01')
        ts_pos_variants = [tspos,
                           tspos.to_pydatetime(),
                           tspos.to_datetime64().astype('datetime64[ns]'),
                           tspos.to_datetime64().astype('datetime64[D]')]

        for variant in ts_neg_variants + ts_pos_variants:
            res = tdinat + variant
            assert res[1] is pd.NaT
コード例 #6
0
ファイル: test_timestamp.py プロジェクト: hvardhan20/pandas
    def test_to_numpy_alias(self):
        # GH 24653: alias .to_numpy() for scalars
        stamp = Timestamp(datetime.now())
        assert stamp.to_numpy() == stamp.to_datetime64()

        # GH#44460
        msg = "dtype and copy arguments are ignored"
        for bad_call in (lambda: stamp.to_numpy("M8[s]"),
                         lambda: stamp.to_numpy(copy=True)):
            with pytest.raises(ValueError, match=msg):
                bad_call()
コード例 #7
0
    def test_conversion(self):
        # GH#9255
        ts = Timestamp('2000-01-01')

        result = ts.to_pydatetime()
        expected = datetime(2000, 1, 1)
        assert result == expected
        assert type(result) == type(expected)

        result = ts.to_datetime64()
        expected = np.datetime64(ts.value, 'ns')
        assert result == expected
        assert type(result) == type(expected)
        assert result.dtype == expected.dtype
コード例 #8
0
    def test_conversion(self):
        # GH#9255
        ts = Timestamp('2000-01-01')

        result = ts.to_pydatetime()
        expected = datetime(2000, 1, 1)
        assert result == expected
        assert type(result) == type(expected)

        result = ts.to_datetime64()
        expected = np.datetime64(ts.value, 'ns')
        assert result == expected
        assert type(result) == type(expected)
        assert result.dtype == expected.dtype
コード例 #9
0
 def test_to_numpy_alias(self):
     # GH 24653: alias .to_numpy() for scalars
     ts = Timestamp(datetime.now())
     assert ts.to_datetime64() == ts.to_numpy()
コード例 #10
0
def from_timestamp_to_datetime64(ts: pd.Timestamp) -> np.datetime64:
    """Convert a pandas ``Timestamp`` to a ``numpy.datetime64``."""
    converted = ts.to_datetime64()
    return converted
コード例 #11
0
ファイル: test_timestamp.py プロジェクト: clham/pandas
 def test_to_numpy_alias(self):
     # GH 24653: alias .to_numpy() for scalars
     ts = Timestamp(datetime.now())
     assert ts.to_datetime64() == ts.to_numpy()
コード例 #12
0
    def compute(self,
                nprocs: int,
                ndayagg: int = 1,
                method: str = 'mean',
                firstday: pd.Timestamp = None,
                rolling: bool = False):
        """
        Aggregate the array along its time axis in windows of ``ndayagg`` days.

        Windows are stamped left. With ``rolling=True`` the window advances one
        step per aggregate; otherwise it jumps ``ndayagg`` steps (contiguous,
        non-overlapping windows). The starting index can be pinned with
        ``firstday``; when it is None aggregation starts at index 0.
        Multiprocessing is skipped when ``nprocs`` is 1.

        Parameters
        ----------
        nprocs : int
            Number of worker processes; 1 runs a sequential loop that still
            uses the shared-memory output array.
        ndayagg : int
            Number of days per aggregation window.
        method : str
            Aggregation method name passed to the workers (e.g. 'mean').
        firstday : pd.Timestamp, optional
            Timestamp to start at; must match an entry of ``self.coords['time']``
            exactly, otherwise ``int()`` on an empty match raises — TODO confirm
            intended behavior for a non-matching firstday.
        rolling : bool
            Rolling (stride 1) versus block (stride ``ndayagg``) aggregation.

        Returns
        -------
        xr.DataArray
            The aggregated data with updated time coordinates and the original
            attributes; named ``<name>-<ndayagg>-<roll|nonroll>-<method>``.
        """
        # EAFP: when firstday is None, the .to_datetime64() call below raises
        # AttributeError and the handler falls back to starting at index 0.
        try:
            which_first = int(
                np.where(self.coords['time'] == firstday.to_datetime64())[0]
            )  # Returns a tuple of indices (first row then column) but we only have the first dimension
            logging.debug(
                f'TimeAggregator found firstday {firstday} at location {which_first} to start aggregation, rolling = {rolling}'
            )
        except AttributeError:
            which_first = 0
            logging.debug(
                f'TimeAggregator found no firstday, aggregation will start at location 0, rolling = {rolling}'
            )
        if rolling:
            # Slicing off the end where not enough days are present to aggregate
            time_axis_indices = np.arange(which_first,
                                          self.shape[0] - ndayagg + 1, 1)
        else:
            time_axis_indices = np.arange(which_first,
                                          self.shape[0] - ndayagg + 1, ndayagg)

        if nprocs > 1:
            # Workers receive the shared-memory arrays and metadata through the
            # pool initializer, then each aggregates one window index.
            with mp.Pool(processes=nprocs,
                         initializer=init_worker,
                         initargs=(self.inarray, self.share_input, self.dtype,
                                   self.shape, self.doyaxis, None,
                                   self.outarray, ndayagg, method)) as pool:
                pool.map(aggregate_at, time_axis_indices)
        else:  # Just a sequential loop, still use of a shared memory array
            init_worker(self.inarray, self.share_input, self.dtype, self.shape,
                        self.doyaxis, None, self.outarray, ndayagg, method)
            for timeindex in time_axis_indices:
                aggregate_at(timeindex)

        # Reconstruction from shared out array
        # Only the window-start rows are kept; remaining rows of the shared
        # buffer are untouched scratch space.
        np_outarray = np.frombuffer(self.outarray, dtype=self.dtype).reshape(
            self.shape)[time_axis_indices, ...]
        # Time coordinates shrink to the left-stamped window starts.
        coords = dict(self.coords)
        coords['time'] = coords['time'][time_axis_indices]
        result = xr.DataArray(np_outarray,
                              dims=self.dims,
                              coords=coords,
                              name='-'.join([
                                  self.name,
                                  str(ndayagg),
                                  'roll' if rolling else 'nonroll', method
                              ]))
        result.attrs = self.attrs
        logging.info(
            f'TimeAggregator added coordinates and attributes to aggregated outarray with shape {result.shape} and will return as xr.DataArray with dtype {result.dtype}'
        )
        Computer.cleanup(self)
        return result