Example #1
 def test_market_with_most_expensive_offer(self):
     m1 = MagicMock(spec=Market)
     o1 = MagicMock(spec=Offer)
     o1.price = 12
     o1.energy = 1
     m2 = MagicMock(spec=Market)
     o2 = MagicMock(spec=Offer)
     o2.price = 12
     o2.energy = 1
     m3 = MagicMock(spec=Market)
     o3 = MagicMock(spec=Offer)
     o3.price = 12
     o3.energy = 1
     markets = OrderedDict()
     markets[DateTime(2018, 1, 1, 12, 0, 0)] = m1
     markets[DateTime(2018, 1, 1, 12, 15, 0)] = m2
     markets[DateTime(2018, 1, 1, 12, 30, 0)] = m3
     self.area._markets = MagicMock(spec=AreaMarkets)
     self.area._markets.markets = markets
     m1.sorted_offers = [o1, o1]
     m2.sorted_offers = [o2, o2]
     m3.sorted_offers = [o3, o3]
     assert self.area.market_with_most_expensive_offer is m1
     o1.price = 19
     o2.price = 20
     o3.price = 18
     assert self.area.market_with_most_expensive_offer is m2
     o1.price = 18
     o2.price = 19
     o3.price = 20
     assert self.area.market_with_most_expensive_offer is m3
Example #2
 def next_dagrun_info(
     self,
     *,
     last_automated_data_interval: Optional[DataInterval],
     restriction: TimeRestriction,
 ) -> Optional[DagRunInfo]:
     if last_automated_data_interval is not None:  # There was a previous run on the regular schedule.
         last_start = last_automated_data_interval.start
         last_start_weekday = last_start.weekday()
         if 0 <= last_start_weekday < 4:  # Last run on Monday through Thursday -- next is tomorrow.
             delta = timedelta(days=1)
         else:  # Last run on Friday -- skip to next Monday.
             delta = timedelta(days=(7 - last_start_weekday))
         next_start = DateTime.combine((last_start + delta).date(), Time.min).replace(tzinfo=UTC)
     else:  # This is the first ever run on the regular schedule.
         next_start = restriction.earliest
         if next_start is None:  # No start_date. Don't schedule.
             return None
         if not restriction.catchup:
             # If the DAG has catchup=False, today is the earliest to consider.
             next_start = max(next_start, DateTime.combine(Date.today(), Time.min).replace(tzinfo=UTC))
         elif next_start.time() != Time.min:
             # If earliest does not fall on midnight, skip to the next day.
             next_day = next_start.date() + timedelta(days=1)
             next_start = DateTime.combine(next_day, Time.min).replace(tzinfo=UTC)
         next_start_weekday = next_start.weekday()
         if next_start_weekday in (5, 6):  # If next start is in the weekend, go to next Monday.
             delta = timedelta(days=(7 - next_start_weekday))
             next_start = next_start + delta
     if restriction.latest is not None and next_start > restriction.latest:
         return None  # Over the DAG's scheduled end; don't schedule.
     return DagRunInfo.interval(start=next_start, end=(next_start + timedelta(days=1)))
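A minimal sketch of how a timetable built from methods like these could be wired into Airflow (2.2+ assumed); the class name AfterWorkdayTimetable, the plugin name, and the dag_id are illustrative assumptions, not part of the snippet:

from pendulum import datetime

from airflow import DAG
from airflow.plugins_manager import AirflowPlugin
from airflow.timetables.base import Timetable


class AfterWorkdayTimetable(Timetable):
    # infer_data_interval(...) and next_dagrun_info(...) from the snippets
    # in this collection would go here.
    ...


class WorkdayTimetablePlugin(AirflowPlugin):
    # Registering the timetable lets the scheduler deserialize it.
    name = "workday_timetable_plugin"
    timetables = [AfterWorkdayTimetable]


with DAG(
    dag_id="example_after_workday",  # hypothetical DAG id
    start_date=datetime(2021, 1, 1, tz="UTC"),
    timetable=AfterWorkdayTimetable(),
) as dag:
    ...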
Example #3
def main() -> None:
    """Execute this script to update or create Socrata datasets."""
    command_line_args: Namespace = parse_command_line_args()
    start_time: DateTime = DateTime.now()
    if not command_line_args.envs:
        command_line_args.envs = ENVS
    envs_summary = ", ".join(command_line_args.envs)
    date: Date = command_line_args.date
    plan_year: str = command_line_args.plan_year

    if not plan_year:
        raise Exception("Missing required argument --plan-year")

    # Create untracked datasets if --create-untracked-datasets is specified
    if command_line_args.create_untracked_datasets is True:
        logger.info("Not updating datasets because --create-untracked-datasets was specified")
        if command_line_args.only_file:
            logger.info("Ignoring --only-file because --create-untracked-datasets was specified")
        logger.info(f"Creating untracked datasets using data from {date}, envs: {envs_summary}")
        for env in command_line_args.envs:
            logger.info(f"Loading env: {env}")
            loader = Loader(env)
            loader.create_all_datasets(plan_year, only_untracked=True)
    # Otherwise, just update all datasets (default behavior)
    else:
        logger.info(f"Updating datasets for {date}, envs: {envs_summary}")
        for env in command_line_args.envs:
            logger.info(f"Loading env: {env}")
            loader = Loader(env)
            loader.update_all_datasets(plan_year, date, only_file=command_line_args.only_file)

    time_elapsed: Duration = DateTime.now() - start_time
    logger.info(f"Finished! Time elapsed: {time_elapsed.in_words()}")
Example #4
def diff_timeframe(now: pendulum.DateTime,
                   published_datetime: pendulum.DateTime, units: str) -> int:
    """Determine time since data update was published.

    Parameters
    ----------
    now : pendulum.DateTime
        The current datetime.
    published_datetime : pendulum.DateTime
        The datetime for the published update.
    units : str
        Either "minutes", "hours" or "days".

    Returns
    -------
    int
        The amount of time that has elapsed since the dataset was updated.
    """
    if units == "minutes":
        time_since_publish = now.diff(published_datetime).in_minutes()
    elif units == "hours":
        time_since_publish = now.diff(published_datetime).in_hours()
    elif units == "days":
        time_since_publish = now.diff(published_datetime).in_days()
    else:
        raise ValueError('units must be "minutes", "hours" or "days"')

    return time_since_publish
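A quick usage sketch (values chosen arbitrarily; pendulum's diff() returns an absolute period by default, so the order of the two datetimes does not change the result):

import pendulum

now = pendulum.now("UTC")
published = now.subtract(hours=5, minutes=30)
print(diff_timeframe(now, published, "hours"))    # 5
print(diff_timeframe(now, published, "minutes"))  # 330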
Example #5
def test_flex_time_zone_changes(
        year, month, day, hour, minute, second, microsecond, tzinfo,
        fold, granularity):
    """Test time zone changes for FlexTime class"""
    full_dt_info = DateTimeInfo(
        year, month, day, hour, minute, second, microsecond, tzinfo, fold)
    gval = granularity.value

    dt_info = FlexTime.form_info(
        full_dt_info, granularity=gval, truncate=True, default=False)
    flex_dt = FlexTime(*dt_info)

    dt_info_native = FlexTime.form_info(
        full_dt_info, granularity=gval, truncate=True, default=True,
        tz_instance=True)

    base_dt = DateTime(**dt_info_native._asdict())
    assert flex_dt == base_dt

    flex_dt_utc = flex_dt.astimezone(UTC)
    base_dt_utc = base_dt.astimezone(timezone(UTC))
    assert flex_dt_utc == base_dt_utc
    assert type(flex_dt_utc) is type(flex_dt)

    flex_dt_there_and_back_again = flex_dt_utc.astimezone(tzinfo)
    base_dt_there_and_back_again = base_dt_utc.astimezone(timezone(tzinfo))
    assert flex_dt_there_and_back_again == base_dt_there_and_back_again

    assert flex_dt_there_and_back_again == flex_dt
Example #6
def round_to_closest_hour(time_data: pendulum.DateTime) -> pendulum.DateTime:
    if time_data.minute == 0:
        return time_data
    elif time_data.minute > 30:
        return time_data.add(minutes=60 - time_data.minute)
    else:
        return time_data.subtract(minutes=time_data.minute)
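A hypothetical call to illustrate the rounding behaviour (note that seconds and microseconds are left untouched by the snippet above):

import pendulum

print(round_to_closest_hour(pendulum.datetime(2021, 6, 1, 10, 42)))  # 2021-06-01T11:00:00+00:00
print(round_to_closest_hour(pendulum.datetime(2021, 6, 1, 10, 20)))  # 2021-06-01T10:00:00+00:00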
Example #7
File: flow_run.py  Project: limx0/prefect
    def get_logs(
        self,
        start_time: Optional[pendulum.DateTime] = None,
        end_time: Optional[pendulum.DateTime] = None,
    ) -> List["FlowRunLog"]:
        """
        Get logs for this flow run from `start_time` to `end_time`.

        Args:
            - start_time (optional): A time to start the log query at, useful for
                limiting the scope. If not provided, all logs up to `updated_at` are
                retrieved.
            - end_time (optional): A time to end the log query at. By default, this is
                set to `self.updated_at` which is the last time that the flow run was
                updated in the backend before this object was created.

        Returns:
            A list of `FlowRunLog` objects sorted by timestamp
        """

        client = prefect.Client()
        end_time = end_time or self.updated_at

        logs_query = {
            with_args(
                "logs",
                {
                    "order_by": {EnumValue("timestamp"): EnumValue("asc")},
                    "where": {
                        "_and": [
                            {"timestamp": {"_lte": end_time.isoformat()}},
                            (
                                {"timestamp": {"_gt": start_time.isoformat()}}
                                if start_time
                                else {}
                            ),
                        ]
                    },
                },
            ): {"timestamp": True, "message": True, "level": True}
        }

        result = client.graphql(
            {
                "query": {
                    with_args(
                        "flow_run",
                        {
                            "where": {"id": {"_eq": self.flow_run_id}},
                        },
                    ): logs_query
                }
            }
        )

        # Unpack the result
        logs = result.get("data", {}).get("flow_run", [{}])[0].get("logs", [])

        return [FlowRunLog.from_dict(log) for log in logs]
Example #8
 def get_success_order_amount(cls, date: pendulum.DateTime):
     amount = (cls.objects.filter(
         status=cls.STATUS_FINISHED,
         created_at__range=[
             date.start_of("day"),
             date.end_of("day"),
         ],
     ).aggregate(amount=models.Sum("amount"))["amount"] or "0")
     return amount
Example #9
 def __find_pend_diff_str(self, pend: pendulum.DateTime):
     cd_diff = pend.diff(pendulum.now())
     (diff_days, diff_hours, diff_minutes) = (cd_diff.days, cd_diff.hours,
                                              cd_diff.minutes)
     if (not pend.is_future()):
         (diff_days, diff_hours, diff_minutes) = (-diff_days, -diff_hours,
                                                  -diff_minutes)
     diff_str = f"{diff_days} days, {diff_hours} hours, {diff_minutes} minutes from now"
     return diff_str
Example #10
 def infer_data_interval(self, run_after: DateTime) -> DataInterval:
     weekday = run_after.weekday()
     if weekday in (0, 6):  # Monday and Sunday -- interval is last Friday.
         days_since_friday = (run_after.weekday() - 4) % 7
         delta = timedelta(days=days_since_friday)
     else:  # Otherwise the interval is yesterday.
         delta = timedelta(days=1)
     start = DateTime.combine((run_after - delta).date(), Time.min).replace(tzinfo=UTC)
     return DataInterval(start=start, end=(start + timedelta(days=1)))
Example #11
    def save_date(self, feed, date: pendulum.DateTime):
        """
        Save the date for the current event.
        """
        # just in case someone didn't follow the type hints
        if isinstance(date, datetime.datetime):
            date = pendulum.from_timestamp(date.timestamp())

        datafile = os.path.join(self.basepath, f'last.{feed}.dat')
        with open(datafile, 'w') as f:
            f.write(str(date.in_tz('UTC')))
Example #12
def get_project_downloads_in_between_dates(project_name: str,
                                           start_date: pendulum.DateTime,
                                           end_date: pendulum.DateTime) -> int:
    query_string = PROJECT_DOWNLOADS_BETWEEN_DATES.format(
        project_name=project_name,
        start_date=start_date.format("YYYYMMDD"),
        end_date=end_date.format("YYYYMMDD"),
        table=settings.BQ_PYPI_DOWNLOADS_TABLE,
    )
    result = _do_downloads_query(query_string)
    if not result:
        return -1
    return result[project_name]
Example #13
File: test_market.py  Project: xg86/d3a
def test_balancing_market_negative_offer_trade(
        market=BalancingMarket(time_slot=DateTime.now())):  # NOQA
    offer = market.balancing_offer(20, -10, 'A', 'A')

    now = DateTime.now(tz=TIME_ZONE)
    trade = market.accept_offer(offer, 'B', time=now, energy=-10)
    assert trade
    assert trade == market.trades[0]
    assert trade.id
    assert trade.time == now
    assert trade.offer is offer
    assert trade.seller == 'A'
    assert trade.buyer == 'B'
Example #14
def chunk_date_range(start_date: DateTime, interval=1) -> Iterable[Mapping[str, Any]]:
    """
    Returns a list of the beginning and ending timestamps of each day between the start date and now.
    The return value is a list of dicts {'oldest': float, 'latest': float} which can be used directly with the Slack API
    """
    intervals = []
    now = pendulum.now()
    # Each stream_slice contains the beginning and ending timestamp for a 24 hour period
    while start_date <= now:
        end = start_date.add(days=interval)
        intervals.append({"oldest": start_date.timestamp(), "latest": end.timestamp()})
        start_date = start_date.add(days=1)

    return intervals
Example #15
def chunk_date_range(start_date: DateTime) -> Iterable[Mapping[str, Any]]:
    """
    Returns a list of each day between the start date and now. Ignore weekends since exchanges don't run on weekends.
    The return value is a list of dicts {'date': date_string}.
    """
    days = []
    now = pendulum.now()
    while start_date < now:
        day_of_week = start_date.day_of_week
        if day_of_week not in (pendulum.SATURDAY, pendulum.SUNDAY):
            days.append({"date": start_date.to_date_string()})
        start_date = start_date.add(days=1)

    return days
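A hypothetical call (start date chosen arbitrarily); weekend dates are skipped, so a Thursday start yields Thursday, Friday, then the following Monday:

import pendulum

chunks = chunk_date_range(pendulum.datetime(2021, 6, 3))  # a Thursday
print(chunks[:3])
# [{'date': '2021-06-03'}, {'date': '2021-06-04'}, {'date': '2021-06-07'}]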
Example #16
def get_offsets(
    subreddit: str,
    after: pendulum.DateTime,
    before: pendulum.DateTime,
    sample_size: int,
    PUSHSHIFT_LIMIT: int,
) -> list[pendulum.DateTime]:
    """For sampling, return a set of hourly offsets, beginning near
    after, that should not overlap"""

    duration = before - after
    info(f"{duration.in_days()=}")
    info(f"{duration.in_hours()=}")
    info(f"{duration.in_weeks()=}")
    results_total = get_pushshift_total(subreddit, after, before)
    results_per_hour = math.ceil(results_total / duration.in_hours())
    info(f"{results_per_hour=} on average")

    info(f"{PUSHSHIFT_LIMIT=}")
    info(f"{sample_size=}")
    queries_total = math.ceil(sample_size / PUSHSHIFT_LIMIT)
    info(f"{queries_total=}")
    info(f"{range(duration.in_hours())=}")

    SEEDS_TO_TRY = 300
    seed = int(after.timestamp())
    for seed_counter in range(SEEDS_TO_TRY):
        seed += seed_counter  # increment seed
        warning(f"attempt {seed_counter} to find non-overlapping offsets")
        offsets = get_cacheable_randos(duration.in_hours(), queries_total, seed)
        if is_overlapping(offsets, PUSHSHIFT_LIMIT, results_per_hour):
            critical(f"  seed attempt {seed_counter} failed")
            continue
        else:
            break
    else:
        print(
            f"I exhausted random sets of offsets at {SEEDS_TO_TRY=}"
            f"Quitting because I'm too likely to pull overlapping results"
        )
        raise RuntimeError

    offsets_as_datetime = []
    for offset_as_hour in offsets:
        offset_as_datetime = after.add(hours=offset_as_hour)
        offsets_as_datetime.append(offset_as_datetime)
    info(f"{len(offsets)=}")
    return offsets_as_datetime
Example #17
 def due_now(self) -> bool:
     """
     Are we in the range [start_datetime, end_datetime)?
     """
     if not self.start_datetime or not self.end_datetime:
         return False
     return self.start_datetime <= Pendulum.now() < self.end_datetime
Example #18
def format_datetime(dt: pendulum.DateTime) -> str:
    """Format date and time by pattern.

    Args:
        dt: DateTime instance to format.
    """
    return dt.format("ddd, MMM D, YYYY HH:mm:ss zz", locale="en")
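For example (the exact output is approximate and depends on the timezone abbreviation pendulum reports):

import pendulum

dt = pendulum.datetime(2021, 3, 14, 15, 9, 26, tz="Europe/Berlin")
print(format_datetime(dt))  # e.g. "Sun, Mar 14, 2021 15:09:26 CET"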
Example #19
    def test_export_unmatched_loads_support_the_all_devices_parameter(self):
        house1 = Area("House1", [self.area1, self.area2])
        self.grid = Area("Grid", [house1, self.area3])
        self.grid._markets.past_markets = {}
        for i in range(1, 11):
            timeslot = DateTime(2018, 1, 1, 12 + i, 0, 0)
            self.strategy1.state.desired_energy_Wh[timeslot] = 100
            self.strategy2.state.desired_energy_Wh[timeslot] = 100
            self.strategy3.state.desired_energy_Wh[timeslot] = 100
            mock_market = MagicMock(spec=Market)
            mock_market.time_slot = timeslot
            mock_market.traded_energy = {
                "load1": -0.101,
                "load2": -0.101,
                "load3": -0.07
            }
            house1._markets.past_markets[timeslot] = mock_market
            self.grid._markets.past_markets[timeslot] = mock_market

        unmatched_loads = export_unmatched_loads(self.grid, all_devices=True)
        print(unmatched_loads)
        assert self.area3.name in unmatched_loads["areas"]
        assert unmatched_loads["areas"][
            self.area3.name]["unmatched_load_count"] == 10
        assert unmatched_loads["unmatched_load_count"] == 10
        assert not unmatched_loads["all_loads_met"]
Example #20
    def fetch_access_token(self) -> None:
        """Fetch an access token to obtain Plan Finder data."""
        # Construct request
        url: str = TOKEN_URLS[self.env]
        username: str = USERNAMES[self.env]
        key_id, key_secret = API_KEYS[self.env]
        body = {
            "userName": username,
            "scopes": "mpfpe_pde_full",
            "keyId": key_id,
            "keySecret": key_secret,
        }
        params = {}
        if self.env in ACS_PARAMS:
            params["ACS"] = ACS_PARAMS[self.env]

        # Submit HTTP POST request to obtain token
        logger.info(f"Fetching {self.env} access token")
        response: Response = requests.post(url, json=body, params=params)
        if response.status_code != 200:
            logger.error(Loader._request_details(response))
            raise RuntimeError(f"Failed to fetch token: HTTP status {response.status_code}")

        # Extract token from response
        response_json: dict = response.json()
        access_token: str = response_json["accessToken"]
        expires: int = response_json["expires"]
        self.access_token = access_token
        self.access_token_expires = DateTime.now() + Duration(seconds=expires)
        logger.info(f"Fetched {self.env} access token; expires {self.access_token_expires}")
Example #21
    def run(self, resume=False) -> (Period, duration):
        self.sim_status = "running"
        if resume:
            log.critical("Resuming simulation")
            self._info()
        self.is_stopped = False
        while True:
            if resume:
                # FIXME: Fix resume time calculation
                if self.run_start is None or self.paused_time is None:
                    raise RuntimeError("Can't resume without saved state")
                slot_resume, tick_resume = divmod(
                    self.area.current_tick,
                    self.simulation_config.ticks_per_slot)
            else:
                self.run_start = DateTime.now(tz=TIME_ZONE)
                self.paused_time = 0
                slot_resume = tick_resume = 0

            try:
                self._run_cli_execute_cycle(slot_resume, tick_resume) \
                    if self._started_from_cli \
                    else self._execute_simulation(slot_resume, tick_resume)
            except KeyboardInterrupt:
                break
            except SimulationResetException:
                break
            else:
                break
Example #22
    def get_articles_since(self,
                           since: pendulum.DateTime) -> List[ArticleData]:
        """Get all new documents"""

        self._verify_token_available()

        resp = self._session.post(
            self._base_url,
            json={
                "query":
                "query($since:DateTime) {"
                "  search(filter: {feed: true, publishedAt: {from: $since}},"
                "         sort: {key: publishedAt}) {"
                "    nodes { entity { ... on Document { meta { title path publishDate } } } }"
                "  }"
                "}",
                "variables": {
                    "since": since.to_rfc3339_string(),
                }
            })

        resp.raise_for_status()
        resp_body = resp.json()

        return [
            ArticleData(
                n["entity"]["meta"]["title"], n["entity"]["meta"]["path"],
                cast(pendulum.DateTime,
                     pendulum.parse(n["entity"]["meta"]["publishDate"])))
            for n in resp_body["data"]["search"]["nodes"]
        ]
Example #23
def find_dates(storage: Path):
    # less likely to have Tesla days, since they have to be manually downloaded
    for path in storage.glob('tesla-*.csv'):
        date = DateTime.strptime(path.name, 'tesla-%Y-%m-%d.csv').date()
        octopus = storage / f'octopus-{date}.csv'
        if octopus.exists():
            yield date
Example #24
File: google.py  Project: Tinche/pyrseia
async def request_access_token(
    session: ClientSession,
    url: str,
    creds: ServiceAccountCredentials,
    now: Optional[DateTime] = None,
) -> AccessToken:
    now = now or DateTime.utcnow()
    assertion = encode(
        {
            "iss": creds.client_email,
            "scope": SCOPE_ANDROIDPUBLISHER,
            "aud": creds.token_uri,
            "iat": int(now.timestamp()),
            "exp": int(now.add(hours=1).timestamp()),
        },
        creds.private_key,
        algorithm="RS256",
    ).decode("utf8")

    async with session.post(
            url,
            data={
                "grant_type": ACCESS_TOKEN_GRANT_TYPE,
                "assertion": assertion
            },
    ) as resp:
        payload = await resp.read()
        if resp.status != 200:
            raise HttpError(resp.status, payload)
        return converter.structure(loads(payload), AccessToken)
Example #25
    def fetch_zip_file(self, plan_year: str, date: Optional[Date] = None) -> Path:
        """Download a Plan Finder zip file for a given date (defaults to today)."""
        # Resolve the default date at call time; a `Date.today()` default argument
        # would be evaluated only once, when the method is defined
        if date is None:
            date = Date.today()

        # If we don't have a current access token, fetch one
        no_access_token = self.access_token is None
        if no_access_token or DateTime.now() > (self.access_token_expires - Duration(minutes=5)):
            self.fetch_access_token()

        # Construct request
        url = DATA_URL
        headers = {
            "X-API-CONSUMER-ID": API_KEYS[self.env][0],
            "Authorization": f"Bearer {self.access_token}",
        }
        params = {"fileName": f"{plan_year}_{date.to_date_string()}"}

        # Submit GET request to download file
        logger.info(f"Fetching {self.env} zip file for plan year {plan_year} and date {date}")
        response = requests.get(url, headers=headers, params=params)
        if response.status_code != 200:
            raise RuntimeError(
                "Failed to fetch zip file (this may be expected for dates with no data): HTTP "
                f"status {response.status_code}"
            )

        # Save zip file to disk and return its path
        zip_bytes: bytes = response.content
        zip_file_path = DATA_DIR_PATH / f"{self.env}_{date}.zip"
        with open(zip_file_path, "wb") as zip_file:
            zip_file.write(zip_bytes)
        logger.info(f"Fetched {self.env} zip file: {zip_file_path}")
        return zip_file_path
Example #26
    def test_export_unmatched_loads_reports_cell_tower_areas(self):
        house1 = Area("House1", [self.area1, self.area2])
        ct_strategy = MagicMock(spec=CellTowerLoadHoursStrategy)
        ct_strategy.state = MagicMock(spec=LoadState)
        ct_strategy.state.desired_energy_Wh = {}
        cell_tower = Area("Cell Tower", strategy=ct_strategy)
        self.grid = Area("Grid", [house1, cell_tower])
        for i in range(1, 11):
            timeslot = DateTime(2018, 1, 1, 12 + i, 0, 0)
            self.strategy1.state.desired_energy_Wh[timeslot] = 100
            self.strategy2.state.desired_energy_Wh[timeslot] = 100

            mock_market = MagicMock(spec=Market)
            mock_market.time_slot = timeslot
            mock_market.traded_energy = {"load1": -0.09, "load2": -0.099}
            house1._markets.past_markets[timeslot] = mock_market

            mock_market_ct = MagicMock(spec=Market)
            mock_market_ct.time_slot = timeslot
            mock_market_ct.traded_energy = {"Cell Tower": -0.4}
            ct_strategy.state.desired_energy_Wh[timeslot] = 1000
            cell_tower._markets.past_markets[timeslot] = mock_market_ct

            self.grid._markets.past_markets[timeslot] = mock_market

        unmatched_loads = export_unmatched_loads(self.grid)
        assert unmatched_loads["unmatched_load_count"] == 30
        assert not unmatched_loads["all_loads_met"]
Example #27
 def __init__(self, name):
     self.name = name
     self.current_tick = 10
     self.bc = False
     self.now = DateTime.now()
     DeviceRegistry.REGISTRY = device_registry_dict
     ConstSettings.BalancingSettings.ENABLE_BALANCING_MARKET = True
Example #28
def query_pushshift(
    limit: int,
    after: pendulum.DateTime,
    before: pendulum.DateTime,
    subreddit: str,
    query: str = "",
    comments_num: str = ">0",
) -> typ.Any:
    """Given search parameters, query pushshift and return JSON."""

    # https://github.com/pushshift/api

    # no need to pass different limit params beyond 100 (Pushshift's limit)
    # as it creates unnecessary keys in get_JSON cache
    if limit >= PUSHSHIFT_LIMIT:
        limit_param = f"limit={PUSHSHIFT_LIMIT}&"
    else:
        limit_param = f"limit={limit}&"

    after_human = after.format("YYYY-MM-DD HH:mm:ss")
    before_human = before.format("YYYY-MM-DD HH:mm:ss")
    critical(f"******* between {after_human} and {before_human}")
    after_timestamp = after.int_timestamp
    before_timestamp = before.int_timestamp
    debug(f"******* between {after_timestamp} and {before_timestamp}")

    optional_params = ""
    if after:
        optional_params += f"&after={after_timestamp}"
    if before:
        optional_params += f"&before={before_timestamp}"
    if comments_num:
        # I prefer `comments_num`, but Reddit uses poorly
        # named `num_comments`
        optional_params += f"&num_comments={comments_num}"
    # this can be used to remove any message marked "removed"
    # see earlier commits for full functionality
    # optional_params += f"&selftext:not=[removed]"

    pushshift_url = (f"https://api.pushshift.io/reddit/submission/search/"
                     f"?{limit_param}subreddit={subreddit}{optional_params}")
    print(f"{pushshift_url=}")
    pushshift_data = web_utils.get_JSON(pushshift_url)["data"]
    if len(pushshift_data) != 100:
        print(f"short on some entries {len(pushshift_data)}")
        # breakpoint()
    return pushshift_data
Example #29
File: api.py  Project: emosyne/crate
def pendulum_to_nlprp_datetime(p: Pendulum, to_utc: bool = True) -> str:
    """
    Converts a :class:`pendulum.Pendulum` to the ISO string format (with
    timezone) used by the NLPRP.
    """
    if to_utc:
        p = convert_datetime_to_utc(p)
    return p.isoformat()
Example #30
 def generate_cron_line(self,
                        time: pendulum.DateTime,
                        content: str = "") -> str:
     ts = time.strftime("%Y-%m-%d %H:%M")
     content = f"[{ts}] {content}"
     cron_time = self.gen_cron_time(time)
     fmt = {"title": "Clash of clans", "content": content}
     return cron_time + " " + COMMAND_TEMPLATE.format(**fmt)
Example #31
 def calc_traffic_by_datetime(cls,
                              dt: pendulum.DateTime,
                              user_id=None,
                              proxy_node=None):
     """获取指定日期指定用户的流量,只有今天的数据会hit db"""
     if dt.date() == utils.get_current_datetime().date():
         return cls._calc_traffic_by_datetime.uncached(
             cls,
             dt,
             user_id,
             proxy_node.id if proxy_node else None,
         )
     return cls._calc_traffic_by_datetime(
         dt.start_of("day"),
         user_id,
         proxy_node.id if proxy_node else None,
     )