Example #1
# Assumed module-level helpers from the surrounding project: `kline_base`
# (API endpoint prefix), `BAR_MULTI` (bars-per-day multiplier keyed by period)
# and `fetch` (HTTP GET returning parsed JSON).
from pandas import DataFrame, Timestamp, concat, to_datetime


def historical_kline(symbol: str,
                     start: Timestamp,
                     end: Timestamp,
                     period: str,
                     type: str = "before",
                     batch: int = 100):
    start_epoch = int(start.timestamp() * 1000)  # epoch milliseconds
    end_epoch = int(end.timestamp() * 1000)
    estimated_bar_num = ((end - start).days + 1) * BAR_MULTI[period]
    all_dfs = []
    while True:
        query = (f"&symbol={symbol}&begin={start_epoch}&period={period}"
                 f"&type={type}&count={min(batch, estimated_bar_num)}")
        url = kline_base + query
        raw = fetch(url)
        df_tmp = DataFrame(raw['item'], columns=raw['column'])
        if df_tmp.empty:  # no more data; avoid an infinite loop
            break
        start_epoch = df_tmp.timestamp.max()
        estimated_bar_num -= len(df_tmp)
        all_dfs.append(df_tmp)
        if start_epoch > end_epoch:
            break

    df = concat(all_dfs)
    # Timestamps come back as epoch milliseconds; parse them explicitly.
    df.timestamp = to_datetime(df.timestamp, unit='ms')
    df.set_index("timestamp", inplace=True)

    return df
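A minimal usage sketch (hypothetical symbol and period; `kline_base`, `BAR_MULTI`, and `fetch` must be configured as in the surrounding module):

bars = historical_kline("SH600036",
                        start=Timestamp("2023-01-01"),
                        end=Timestamp("2023-12-31"),
                        period="day")       # period must be a key of BAR_MULTI
print(bars.index.min(), bars.index.max())   # bars indexed by timestamp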
Example #2
    def test_replace_tzinfo(self):
        # GH#15683
        dt = datetime(2016, 3, 27, 1)
        tzinfo = pytz.timezone('CET').localize(dt, is_dst=False).tzinfo

        result_dt = dt.replace(tzinfo=tzinfo)
        result_pd = Timestamp(dt).replace(tzinfo=tzinfo)

        if PY3:
            # datetime.timestamp() converts in the local timezone
            with tm.set_timezone('UTC'):
                assert result_dt.timestamp() == result_pd.timestamp()

        assert result_dt == result_pd
        assert result_dt == result_pd.to_pydatetime()

        result_dt = dt.replace(tzinfo=tzinfo).replace(tzinfo=None)
        result_pd = Timestamp(dt).replace(tzinfo=tzinfo).replace(tzinfo=None)

        if PY3:
            # datetime.timestamp() converts in the local timezone
            with tm.set_timezone('UTC'):
                assert result_dt.timestamp() == result_pd.timestamp()

        assert result_dt == result_pd
        assert result_dt == result_pd.to_pydatetime()
Example #3
def _define_time_coord(
    adate: pd.Timestamp,
    time_bounds: Optional[Sequence[pd.Timestamp]] = None,
) -> DimCoord:
    """Define a time coordinate. The coordinate will have bounds if
    bounds are provided.

    Args:
        adate:
            The point for the time coordinate.
        time_bounds:
            The values defining the bounds for the time coordinate.

    Returns:
        A time coordinate. This coordinate will have bounds if bounds
        are provided.
    """
    # `DimCoord` comes from iris.coords; `TIME_COORDS` is a project-level
    # mapping of coordinate names to their required dtype and units.
    return DimCoord(
        np.array(adate.timestamp(), dtype=TIME_COORDS["time"].dtype),
        "time",
        bounds=None if time_bounds is None else [
            np.array(t.timestamp(), dtype=TIME_COORDS["time"].dtype)
            for t in time_bounds
        ],
        units=TIME_COORDS["time"].units,
    )
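A hypothetical call, assuming iris and the surrounding project's TIME_COORDS constant are importable (the times are illustrative):

import pandas as pd

coord = _define_time_coord(
    pd.Timestamp("2024-01-01 12:00", tz="UTC"),
    time_bounds=[pd.Timestamp("2024-01-01", tz="UTC"),
                 pd.Timestamp("2024-01-02", tz="UTC")],
)
print(coord.points, coord.bounds)  # epoch seconds, cast to the required dtype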
Example #4
    def get_kpi_day(self, station_code: str, date: pd.Timestamp) -> Dict:
        url = f'{self.base_url}/getKpiStationDay'
        time = int(date.timestamp()) * 1000
        body = {'stationCodes': station_code, 'collectTime': time}
        r = self.session.post(url=url, json=body)
        self._validate_response(r)
        return r.json()
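Note that `int(date.timestamp()) * 1000` truncates to whole seconds before scaling to milliseconds, which is fine for a day-level collectTime; keeping sub-second precision would require scaling first:

ts = pd.Timestamp("2024-05-01 00:00:00.250")
int(ts.timestamp()) * 1000   # 1714521600000 -- sub-second part truncated
int(ts.timestamp() * 1000)   # 1714521600250 -- millisecond precision kept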
Example #5
def finance_report(start: Timestamp,
                   end: Timestamp,
                   market: str,
                   symbol: str,
                   report_type: str,
                   quarter="all") -> DataFrame:
    """
    :param start: start time
    :param end: end time
    :param market: {'HK', 'CN'}
    :param symbol: stock symbol
    :param report_type: {'indicator', 'balance', 'income', 'business'}
    :param quarter: {'all', 'Q1', 'Q2', 'Q3', 'Q4'}
    :return: data frame containing the items of the financial report
    """
    count = (end.to_period(freq='Q') - start.to_period(freq='Q')).n
    end_timestamp = int(end.timestamp() * 1000)
    urlpath = f"{market}/{report_type}.json?symbol={symbol}&type={quarter}" \
              f"&is_detail=true&count={count}&timestamp={end_timestamp}"
    url = urljoin(api_ref.finance_base, urlpath)
    data = utls.fetch(url)
    data_list = data.pop('list')
    # Some raw fields arrive as [value, extra] pairs; keep the first element.
    for d in data_list:
        for k in d:
            if isinstance(d[k], list):
                d[k] = d[k][0]
    df = DataFrame(data_list).drop(columns=['ctime']).rename(
        columns={
            'report_date': 'date'
        }).set_index('date')
    # 'date' is now the index, so convert it there (epoch ms -> datetime64).
    df.index = df.index.astype('M8[ms]')
    df.report_name = df.report_name.str.replace('年报', 'Q4').str.replace('三季报', 'Q3')\
        .str.replace('中报', 'Q2').str.replace('一季报', 'Q1')
    return df
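A hypothetical call, assuming `api_ref`, `utls`, and network access are configured as in the surrounding module (the symbol is illustrative):

df = finance_report(start=Timestamp("2020-01-01"),
                    end=Timestamp("2021-12-31"),
                    market="CN",
                    symbol="SH600036",
                    report_type="indicator")
print(df.report_name.head())  # report names normalised to Q1-Q4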
Example #6
    def get_station_kpi_year(self, station_code: str,
                             date: pd.Timestamp) -> Dict:
        time = int(date.timestamp()) * 1000
        return self._request("getKpiStationYear", {
            'stationCodes': station_code,
            'collectTime': time
        })
Example #7
    def get_dev_kpi_year(self, dev_id: str, dev_type_id: int,
                         date: pd.Timestamp) -> Dict:
        time = int(date.timestamp()) * 1000
        return self._request("getDevKpiYear", {
            'devIds': dev_id,
            'devTypeId': dev_type_id,
            'collectTime': time
        })
Example #8
    def generate_requests(self, timestamp: pd.Timestamp):
        month = timestamp.month if timestamp.month in self.monthly_vals else 0
        seconds_per_time_unit = (self.__class__.SECONDS_IN_DAY //
                                 len(self.monthly_vals[month][timestamp.weekday()]))
        time_unit = ((int(timestamp.timestamp()) %
                      self.__class__.SECONDS_IN_DAY) // seconds_per_time_unit)

        self._populate_split_across_seconds_if_needed(timestamp, month,
                                                      seconds_per_time_unit,
                                                      time_unit)

        # TODO: is this correct?
        second_in_time_unit = int(timestamp.timestamp()) % seconds_per_time_unit
        self._populate_simulation_steps_in_second_if_needed(second_in_time_unit)

        return self._generate_requests_on_current_simulation_step(timestamp)
Example #9
async def task():
    global last_tweet_used_id, last_tweet_used_bap_id, start_time, start_ts

    await bot.wait_until_ready()

    # The four channel groups differ only in config key, target list and
    # label, so register them all in one loop.
    channel_groups = [
        ('live_scores_channels', live_scores_channel, 'live scores'),
        ('price_changes_channels', price_changes_channel, 'price changes'),
        ('team_news_channels', team_news_channel, 'team news'),
        ('stats_channels', stats_channel, 'stats'),
    ]
    for key, channels, label in channel_groups:
        for chan in config[key]:
            c = bot.get_channel(chan)
            if c:
                channels.append(c)
                print('Added {}@{} for {}'.format(c.name, 'FantasyPL', label))
            else:
                print("Couldn't find channel {}".format(chan))

    start_time = Timestamp(get_ntp_time(), unit="s", tz="UTC")
    start_ts = int(start_time.timestamp())
    last_tweet_used_id = None
    last_tweet_used_bap_id = None

    async with aiohttp.ClientSession(connector=conn) as session:
        await get_all_fixtures(session)

        print("Ready.")

        while True:
            await asyncio.gather(
                # get_latest_fixture_tweets(session),
                get_latest_tweets(),
                get_latest_tweets_bap(),
                asyncio.sleep(sleep_time_seconds))
            await bot.wait_until_ready()
Example #10
def generate_date_feature(date: pd.Timestamp):
    """
    Generate date features.
    """
    # `holiday` and `work` are assumed module-level collections of date
    # strings; `get_onehot` one-hot encodes a value over an integer range.
    result = pd.Series()
    result['timestamp'] = date.timestamp()

    format_string = '%Y-%m-%d'
    dt_string = date.strftime(format_string)
    result['holiday'] = int(
        (dt_string in holiday)
        or (date.weekday() in [5, 6] and dt_string not in work))

    result = pd.concat(
        [result, get_onehot(date.weekday(), 0, 6, name='weekday')])
    return result
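A quick sanity check, assuming the module-level `holiday`, `work`, and `get_onehot` described above are defined (the date is arbitrary; 2024-06-01 is a Saturday):

features = generate_date_feature(pd.Timestamp('2024-06-01'))
print(features['timestamp'], features['holiday'])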
Example #11
    def test_timestamp(self):
        # GH#17329
        # tz-naive --> treat it as if it were UTC for purposes of timestamp()
        ts = Timestamp.now()
        uts = ts.replace(tzinfo=utc)
        assert ts.timestamp() == uts.timestamp()

        tsc = Timestamp("2014-10-11 11:00:01.12345678", tz="US/Central")
        utsc = tsc.tz_convert("UTC")

        # utsc is a different representation of the same time
        assert tsc.timestamp() == utsc.timestamp()

        # datetime.timestamp() converts in the local timezone
        with tm.set_timezone("UTC"):
            # should agree with datetime.timestamp method
            dt = ts.to_pydatetime()
            assert dt.timestamp() == ts.timestamp()
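The tz-naive rule the test relies on can be checked directly: a naive Timestamp is treated as UTC by timestamp(), so one day past the epoch is exactly 86,400 seconds:

ts = Timestamp("1970-01-02")      # tz-naive, interpreted as UTC
assert ts.timestamp() == 86400.0  # one day after the epoch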
Example #12
    def test_timestamp(self, dt64, ts):
        alt = Timestamp(dt64)
        assert ts.timestamp() == alt.timestamp()
Example #13
    async def _request_bars(  # noqa C901 'FTXDataClient._request_bars' is too complex (11)
        self,
        bar_type: BarType,
        from_datetime: pd.Timestamp,
        to_datetime: pd.Timestamp,
        limit: int,
        correlation_id: UUID4,
    ):
        instrument = self._instrument_provider.find(bar_type.instrument_id)
        if instrument is None:
            self._log.error(
                f"Cannot parse historical bars: "
                f"no instrument found for {bar_type.instrument_id}.")
            return

        # Convert the bar specification to a window resolution in seconds
        if bar_type.spec.aggregation == BarAggregation.SECOND:
            resolution = bar_type.spec.step
        elif bar_type.spec.aggregation == BarAggregation.MINUTE:
            resolution = bar_type.spec.step * 60
        elif bar_type.spec.aggregation == BarAggregation.HOUR:
            resolution = bar_type.spec.step * 60 * 60
        elif bar_type.spec.aggregation == BarAggregation.DAY:
            resolution = bar_type.spec.step * 60 * 60 * 24
        else:  # pragma: no cover (design-time error)
            raise RuntimeError(
                f"invalid aggregation type, "
                f"was {BarAggregationParser.to_str_py(bar_type.spec.aggregation)}",
            )

        # Define validation constants
        max_seconds: int = 30 * 86400
        valid_windows: List[int] = [15, 60, 300, 900, 3600, 14400, 86400]

        # Validate resolution: sub-daily windows must be one of the fixed
        # values; windows above one day must be whole multiples of a day.
        if resolution > max_seconds:
            self._log.error(
                f"Cannot request bars for {bar_type}: "
                f"seconds window exceeds MAX_SECONDS {max_seconds}.")
            return

        if resolution > 86400 and resolution % 86400 != 0:
            self._log.error(
                f"Cannot request bars for {bar_type}: "
                f"seconds window exceeds 1 day (86,400) and is not a multiple of 1 day.",
            )
            return
        elif resolution <= 86400 and resolution not in valid_windows:
            self._log.error(
                f"Cannot request bars for {bar_type}: "
                f"invalid seconds window, use one of {valid_windows}.")
            return

        # Get historical bars data
        data: List[Dict[str, Any]] = await self._http_client.get_historical_prices(
            market=bar_type.instrument_id.symbol.value,
            resolution=resolution,
            start_time=int(from_datetime.timestamp()) if from_datetime is not None else None,
            end_time=int(to_datetime.timestamp()) if to_datetime is not None else None,
        )

        # Keep only the most recent `limit` bars
        if limit:
            data = data[-limit:]

        bars: List[Bar] = parse_bars_http(
            instrument=instrument,
            bar_type=bar_type,
            data=data,
            ts_event_delta=secs_to_nanos(resolution),
            ts_init=self._clock.timestamp_ns(),
        )
        # The last bar may still be forming; deliver it separately as partial
        partial: Bar = bars.pop()

        self._handle_bars(bar_type, bars, partial, correlation_id)
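The start_time and end_time arguments above are plain epoch seconds, which is what `int(from_datetime.timestamp())` produces for a tz-aware pd.Timestamp:

import pandas as pd

start = pd.Timestamp("2022-01-01", tz="UTC")
int(start.timestamp())  # 1640995200 -- epoch seconds for the query window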
Example #14
    def _worker(self, exchange):
        r = Rest()
        storage = Storage(self.config)
        for pair in self.config.backfill[exchange]:
            try:
                start = self.config.backfill[exchange][pair].start

                while True:
                    end = storage.get_start_date(exchange, 'trades', pair)
                    if not all(e == end[0] for e in end):
                        raise InconsistentStorage(
                            "Stored data differs, cannot backfill")
                    end = end[0]
                    if end:
                        break
                    time.sleep(10)
                end = Timestamp(end, unit='s')
                end -= Timedelta(microseconds=1)
                start = Timestamp(start)
                if end <= start:
                    LOG.info(
                        "Data in storage is earlier than backfill start date for %s - %s",
                        exchange, pair)
                    continue

                LOG.info("Backfill - Starting for %s - %s for range %s - %s",
                         exchange, pair, start, str(end))

                # Backfill from end date to start date, 1 day at a time, in reverse order (from end -> start)
                while start < end:
                    seg_start = end.replace(hour=0,
                                            minute=0,
                                            second=0,
                                            microsecond=0,
                                            nanosecond=0)
                    if start > seg_start:
                        seg_start = start
                    LOG.info("Backfill - Reading %s to %s for %s - %s",
                             seg_start, end, exchange, pair)

                    trades = []
                    try:
                        for t in r[exchange].trades(pair, str(seg_start),
                                                    str(end)):
                            trades.extend(t)
                    except Exception:
                        LOG.warning(
                            "Backfill - encountered error backfilling %s - %s, trying again...",
                            exchange,
                            pair,
                            exc_info=True)
                        time.sleep(300)
                        continue

                    if not trades:
                        end = seg_start - Timedelta(nanoseconds=1)
                        continue

                    storage.aggregate(trades)
                    storage.write(exchange, 'trades', pair, end.timestamp())
                    LOG.info("Backfill - Wrote %s to %s for %s - %s",
                             seg_start, end, exchange, pair)
                    end = seg_start - Timedelta(nanoseconds=1)
                LOG.info("Backfill for %s - %s completed", exchange, pair)
            except Exception:
                LOG.error("Backfill failed for %s - %s",
                          exchange,
                          pair,
                          exc_info=True)
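A minimal check of the epoch round-trip used above, since `Timestamp(end, unit='s')` and `Timestamp.timestamp()` are inverses for UTC epoch values:

from pandas import Timestamp

t = 1609459200.0                                # 2021-01-01 00:00:00 UTC
assert Timestamp(t, unit='s').timestamp() == t  # round-trips exactly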