def test_fetch_auth_tokens__raises_Abort_when_it_times_out_waiting_for_the_user(
    respx_mock, dummy_context, mocker
):
    """
    Validate that the ``fetch_auth_tokens()`` function will raise an Abort if the
    time runs out before a user completes the login process.
    """
    respx_mock.post(f"{LOGIN_DOMAIN}/oauth/device/code").mock(
        return_value=httpx.Response(
            httpx.codes.OK,
            json=dict(
                device_code="dummy-code",
                verification_uri_complete="https://dummy-uri.com",
                interval=0,
            ),
        ),
    )
    respx_mock.post(f"{LOGIN_DOMAIN}/oauth/token").mock(
        return_value=httpx.Response(
            httpx.codes.BAD_REQUEST,
            json=dict(error="authorization_pending"),
        ),
    )

    one_tick = Tick(
        counter=1,
        elapsed=pendulum.Duration(seconds=1),
        total_elapsed=pendulum.Duration(seconds=1),
    )
    mocker.patch("jobbergate_cli.auth.TimeLoop", return_value=[one_tick])

    with pytest.raises(Abort, match="not completed in time"):
        fetch_auth_tokens(dummy_context)
def stream_slices(
    self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
) -> Iterable[Optional[Mapping[str, Any]]]:
    """
    The `stream_slices` method implements iterator functionality for `site_urls` and `searchType`.
    The user can pass multiple `site_url` values and we have to process all of them. We can also pass
    the `searchType` parameter in the request body to get data for a particular `searchType` value
    from [`web`, `news`, `image`, `video`]. It's just a double nested loop with a yield statement.
    """
    for site_url in self._site_urls:
        for search_type in self.search_types:
            start_date = self._get_start_date(stream_state, site_url, search_type)
            end_date = self._get_end_date()

            if start_date > end_date:
                start_date = end_date

            next_start = start_date
            period = pendulum.Duration(days=self.range_of_days - 1)
            while next_start <= end_date:
                next_end = min(next_start + period, end_date)
                yield {
                    "site_url": site_url,
                    "search_type": search_type,
                    "start_date": next_start.to_date_string(),
                    "end_date": next_end.to_date_string(),
                }
                # Add 1 day so the next slice's start date does not duplicate data
                # from the previous slice's end date.
                next_start = next_end + pendulum.Duration(days=1)
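# A standalone sketch of the slice arithmetic above (not part of the connector):
# with a made-up site_url, search_type, and range_of_days, the loop yields
# non-overlapping date windows whose start is always one day past the previous end.
import pendulum

site_url, search_type, range_of_days = "https://example.com/", "web", 3
start_date = pendulum.datetime(2023, 1, 1)
end_date = pendulum.datetime(2023, 1, 7)

period = pendulum.Duration(days=range_of_days - 1)
next_start = start_date
while next_start <= end_date:
    next_end = min(next_start + period, end_date)
    print({
        "site_url": site_url,
        "search_type": search_type,
        "start_date": next_start.to_date_string(),
        "end_date": next_end.to_date_string(),
    })
    next_start = next_end + pendulum.Duration(days=1)
# Prints slices 2023-01-01..2023-01-03, 2023-01-04..2023-01-06, 2023-01-07..2023-01-07.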
def test_pack_header_for_user(self, app, user_class):
    """
    This test verifies that the pack_header_for_user method can be used to
    package a token into a header dict for a specified user.
    """
    guard = Praetorian(app, user_class)
    the_dude = user_class(
        username='******',
        password=guard.encrypt_password('abides'),
        roles='admin,operator',
    )

    moment = pendulum.parse('2017-05-21 18:39:55')
    with freezegun.freeze_time(moment):
        header_dict = guard.pack_header_for_user(the_dude)
        token_header = header_dict.get(DEFAULT_JWT_HEADER_NAME)
        assert token_header is not None
        token = token_header.replace(DEFAULT_JWT_HEADER_TYPE, '')
        token = token.strip()
        token_data = jwt.decode(
            token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert token_data['iat'] == moment.int_timestamp
        assert token_data['exp'] == (
            moment + pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        ).int_timestamp
        assert token_data['rf_exp'] == (
            moment + pendulum.Duration(**DEFAULT_JWT_REFRESH_LIFESPAN)
        ).int_timestamp
        assert token_data['id'] == the_dude.id
        assert token_data['rls'] == 'admin,operator'

    moment = pendulum.parse('2017-05-21 18:39:55')
    override_access_lifespan = pendulum.Duration(minutes=1)
    override_refresh_lifespan = pendulum.Duration(hours=1)
    with freezegun.freeze_time(moment):
        header_dict = guard.pack_header_for_user(
            the_dude,
            override_access_lifespan=override_access_lifespan,
            override_refresh_lifespan=override_refresh_lifespan,
        )
        token_header = header_dict.get(DEFAULT_JWT_HEADER_NAME)
        assert token_header is not None
        token = token_header.replace(DEFAULT_JWT_HEADER_TYPE, '')
        token = token.strip()
        token_data = jwt.decode(
            token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert token_data['exp'] == (
            moment + override_access_lifespan
        ).int_timestamp
        assert token_data['rf_exp'] == (
            moment + override_refresh_lifespan
        ).int_timestamp
        assert token_data['id'] == the_dude.id
def streams(self, config: Mapping[str, Any]) -> List[Stream]:
    authenticator = TokenAuthenticator(config["api_token"])
    default_start_date = pendulum.parse(config["start_date"])
    threads_lookback_window = pendulum.Duration(days=config["lookback_window"])

    streams = [
        Channels(authenticator=authenticator),
        ChannelMembers(authenticator=authenticator),
        ChannelMessages(authenticator=authenticator, default_start_date=default_start_date),
        Threads(
            authenticator=authenticator,
            default_start_date=default_start_date,
            lookback_window=threads_lookback_window,
        ),
        Users(authenticator=authenticator),
    ]

    # To sync data from channels, the bot backed by this token needs to join all those channels.
    # This operation is idempotent.
    if config["join_channels"]:
        logger = AirbyteLogger()
        logger.info("joining Slack channels")
        join_channels_stream = JoinChannelsStream(authenticator=authenticator)
        for stream_slice in join_channels_stream.stream_slices():
            for message in join_channels_stream.read_records(
                sync_mode=SyncMode.full_refresh, stream_slice=stream_slice
            ):
                logger.info(message["message"])

    return streams
def test_update_job_expired(self, started_job, adreport, mocker):
    mocker.patch.object(started_job, "job_timeout", new=pendulum.Duration())

    started_job.update_job()

    assert started_job.failed
def login(credentials: LoginSchema, jwt: JWT) -> str:
    """
    Authentication view.

    Args:
        credentials: username/password credentials
        jwt: JWT component used to encode the payload

    All errors are raised.

    Returns:
        token
    """
    user = User.get(username=credentials["username"])
    if not user or not user.check_password(credentials["password"]):
        raise exceptions.Forbidden("Incorrect username or password.")
    if not user.actif:
        raise exceptions.Forbidden("Utilisateur inactif")

    payload = {
        "id": user.id,
        "username": user.username,
        "iat": pendulum.now(),
        "exp": pendulum.now() + pendulum.Duration(seconds=1000),
    }
    token = jwt.encode(payload)
    if token is None:
        raise exceptions.ConfigurationError("échec de l'encodage jwt")
    return token
async def _start_recording_task(self):
    """Start the asynchronous recording task.

    Algorithm:

    1. Collect datapoints from the Arduino for 1 minute and store the datapoints
       into a list. After 1 minute, we should have 20-30 datapoints.
    2. Store the following into a database:
        - Timestamp
        - Median temperature
        - Median humidity
        - Is the humidifier on
       The goal is to store 1 datapoint a minute, so out of all those datapoints,
       only 1 gets saved.
    3. Erase all the datapoints.
    4. Go to step 1.
    """
    try:
        while True:
            # This sleep is important because it allows
            # views.start_recording to return a response right away.
            await asyncio.sleep(1)

            duration = pendulum.Duration(minutes=1)
            datapoints = await self._read_datapoints(duration)
            chosen_datapoint = await self._process_datapoints(datapoints)
            await self._write_to_database(chosen_datapoint)
    except asyncio.CancelledError:
        log.info(f'Cancelling task.')
        raise
    except Exception as e:
        log.error(f'Exception: {e}')
        raise
    finally:
        self.recording_task = None
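# A standalone sketch (assumed, not from the original module) of the "keep one
# datapoint per minute" step described in the docstring above: take the median
# temperature and humidity across the collected samples. The helper name and
# field names ('temperature', 'humidity', 'humidifier_on') are illustrative.
import statistics


def choose_median_datapoint(datapoints):
    """Summarize a minute's worth of sample dicts into a single datapoint."""
    return {
        "temperature": statistics.median(d["temperature"] for d in datapoints),
        "humidity": statistics.median(d["humidity"] for d in datapoints),
        "humidifier_on": datapoints[-1]["humidifier_on"],
    }


samples = [
    {"temperature": 21.5, "humidity": 48.0, "humidifier_on": True},
    {"temperature": 21.7, "humidity": 47.5, "humidifier_on": True},
    {"temperature": 21.6, "humidity": 47.8, "humidifier_on": True},
]
print(choose_median_datapoint(samples))
# {'temperature': 21.6, 'humidity': 47.8, 'humidifier_on': True}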
def make_datetime_ranges(
    start: DateTime, end: DateTime, range_days: int
) -> Iterable[Tuple[DateTime, DateTime]]:
    """
    Generates a list of ranges starting from start up to the end date, each with a
    duration of range_days.

    Args:
        start (DateTime): start of the range
        end (DateTime): end of the range
        range_days (int): number of days to split subranges into

    Returns:
        List[Tuple[DateTime, DateTime]]: list of tuples with ranges. Each tuple contains
        two datetime variables: the first is the period start and the second is the period end.
    """
    if start > end:
        return []

    next_start = start
    period = pendulum.Duration(days=range_days)
    while next_start < end:
        next_end = min(next_start + period, end)
        yield next_start, next_end
        next_start = next_end
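# Hypothetical usage of make_datetime_ranges as defined above: a 5-day span split
# into 2-day windows yields three contiguous (start, end) pairs, with the last one
# clipped to the end date. Dates are made up.
import pendulum

start = pendulum.datetime(2021, 1, 1)
end = pendulum.datetime(2021, 1, 6)
for range_start, range_end in make_datetime_ranges(start, end, 2):
    print(range_start.to_date_string(), "->", range_end.to_date_string())
# 2021-01-01 -> 2021-01-03
# 2021-01-03 -> 2021-01-05
# 2021-01-05 -> 2021-01-06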
def test_email_stream(catalog, time_mock):
    DAYS_DURATION = 100
    DAYS_PER_MINUTE_RATE = 8

    time_mock.move_to(pendulum.parse(TEST_START_DATE) + pendulum.Duration(days=DAYS_DURATION))

    ranges: List[int] = []

    def response_cb(req):
        days = get_range_days_from_request(req)
        ranges.append(days)
        time_mock.tick(delta=datetime.timedelta(minutes=days / DAYS_PER_MINUTE_RATE))
        return (200, {}, json.dumps({"createdAt": "2020"}))

    responses.add_callback("GET", "https://api.iterable.com/api/export/data.json", callback=response_cb)

    records = read_from_source(catalog)
    assert records
    assert sum(ranges) == DAYS_DURATION
    assert len(responses.calls) == len(ranges)
    assert ranges == [
        AdjustableSliceGenerator.INITIAL_RANGE_DAYS,
        *([int(DAYS_PER_MINUTE_RATE / AdjustableSliceGenerator.REQUEST_PER_MINUTE_LIMIT)] * 35),
    ]
def test_email_stream_chunked_encoding(catalog, days_duration, days_per_minute_rate, time_mock):
    time_mock.move_to(pendulum.parse(TEST_START_DATE) + pendulum.Duration(days=days_duration))

    ranges: List[int] = []
    encoding_throw = 0

    def response_cb(req):
        nonlocal encoding_throw
        # Every request fails with a ChunkedEncodingError twice but works on the third attempt.
        if encoding_throw < 2:
            encoding_throw += 1
            raise ChunkedEncodingError()
        encoding_throw = 0
        days = get_range_days_from_request(req)
        ranges.append(days)
        time_mock.tick(delta=datetime.timedelta(minutes=days / days_per_minute_rate))
        return (200, {}, json.dumps({"createdAt": "2020"}))

    responses.add_callback("GET", "https://api.iterable.com/api/export/data.json", callback=response_cb)

    records = read_from_source(catalog)
    assert sum(ranges) == days_duration
    assert len(ranges) == len(records)
    assert len(responses.calls) == 3 * len(ranges)
def test_auth_required(self, client, default_guard):
    """
    This test verifies that the @auth_required decorator can be used to ensure
    that any access to a protected endpoint must have a properly structured
    auth header including a valid jwt token. Otherwise, a 401 error occurs
    with an informative error message.
    """
    # Token is not in header
    response = client.get(
        '/protected',
        headers={},
    )
    assert "JWT token not found" in response.json['message']
    assert response.status_code == 401

    # Token has invalid structure
    response = client.get(
        '/protected',
        headers={'Authorization': 'bad_structure iamatoken'},
    )
    assert "JWT header structure is invalid" in response.json['message']
    assert response.status_code == 401

    # Token is expired
    moment = pendulum.parse('2017-05-24 10:18:45')
    with freezegun.freeze_time(moment):
        headers = default_guard.pack_header_for_user(self.the_dude)
    moment = (
        moment
        + default_guard.access_lifespan
        + pendulum.Duration(seconds=1)
    )
    with freezegun.freeze_time(moment):
        response = client.get(
            '/protected',
            headers=headers,
        )
        assert response.status_code == 401
        assert "access permission has expired" in response.json['message']

    # Token is present and valid
    moment = pendulum.parse('2017-05-24 10:38:45')
    with freezegun.freeze_time(moment):
        response = client.get(
            '/protected',
            headers=default_guard.pack_header_for_user(self.the_dude),
        )
        assert response.status_code == 200
def test_deletes_patient_task_schedule(self) -> None:
    schedule = TaskSchedule()
    schedule.group_id = self.group.id
    self.dbsession.add(schedule)
    self.dbsession.flush()

    item = TaskScheduleItem()
    item.schedule_id = schedule.id
    item.task_table_name = "ace3"
    item.due_from = pendulum.Duration(days=30)
    item.due_by = pendulum.Duration(days=60)
    self.dbsession.add(item)
    self.dbsession.flush()

    patient = self.create_patient()
    pts = PatientTaskSchedule()
    pts.schedule_id = schedule.id
    pts.patient_pk = patient.pk
    self.dbsession.add(pts)
    self.dbsession.commit()

    self.assertIsNotNone(
        self.dbsession.query(TaskSchedule)
        .filter(TaskSchedule.id == schedule.id)
        .one_or_none()
    )
    self.assertIsNotNone(
        self.dbsession.query(TaskScheduleItem)
        .filter(TaskScheduleItem.id == item.id)
        .one_or_none()
    )
    self.assertIsNotNone(
        self.dbsession.query(PatientTaskSchedule)
        .filter(PatientTaskSchedule.id == pts.id)
        .one_or_none()
    )

    self.dbsession.delete(patient)
    self.dbsession.commit()

    self.assertIsNotNone(
        self.dbsession.query(TaskSchedule)
        .filter(TaskSchedule.id == schedule.id)
        .one_or_none()
    )
    self.assertIsNotNone(
        self.dbsession.query(TaskScheduleItem)
        .filter(TaskScheduleItem.id == item.id)
        .one_or_none()
    )
    self.assertIsNone(
        self.dbsession.query(PatientTaskSchedule)
        .filter(PatientTaskSchedule.id == pts.id)
        .one_or_none()
    )
def test_auth_required(self, client, default_guard, use_cookie):
    """
    This test verifies that the @auth_required decorator can be used to ensure
    that any access to a protected endpoint must have a properly structured
    auth header or cookie including a valid jwt token. Otherwise, a 401 error
    occurs with an informative error message.
    """
    # Token is not in header or cookie
    response = client.get(
        "/protected",
        headers={},
    )
    exc_msg = textwrap.dedent(
        f"""
        Could not find token in any of the given locations:
        {default_guard.jwt_places}
        """
    ).replace("\n", "")
    assert exc_msg in response.json["message"]
    assert response.status_code == 401

    # Token has invalid structure
    response = client.get(
        "/protected",
        headers={"Authorization": "bad_structure iamatoken"},
    )
    assert "JWT header structure is invalid" in response.json["message"]
    assert response.status_code == 401

    # Token is expired
    moment = pendulum.parse('2017-05-24 10:18:45')
    with plummet.frozen_time(moment):
        headers = default_guard.pack_header_for_user(self.the_dude)
    moment = (
        moment
        + default_guard.access_lifespan
        + pendulum.Duration(seconds=1)
    )
    with plummet.frozen_time(moment):
        response = client.get(
            "/protected",
            headers=headers,
        )
        assert response.status_code == 401
        assert "access permission has expired" in response.json["message"]

    # Token is present and valid in header or cookie
    with plummet.frozen_time('2017-05-24 10:38:45'):
        response = client.get(
            "/protected",
            headers=default_guard.pack_header_for_user(self.the_dude),
        )
        assert response.status_code == 200

        token = default_guard.encode_jwt_token(self.the_dude)
        with use_cookie(token):
            response = client.get("/protected")
        assert response.status_code == 200
def time_between(self, v1: Version, v2: Version) -> pendulum.Duration:
    """Get the time between two versions."""
    versions_from_excl = dropwhile(lambda v: v <= v1, self.versions)
    versions_to_incl = takewhile(lambda v: v <= v2, versions_from_excl)
    durations = self.time_between_versions

    # Could use sum(...), but mypy gets confused?
    total_duration = pendulum.Duration(0)
    for v in versions_to_incl:
        total_duration += durations[v]
    return total_duration
def reduce_range(self) -> StreamSlice:
    """
    This method is supposed to be called when slice processing failed. It resets the
    next slice's start date to the previous one and reduces the slice range by
    RANGE_REDUCE_FACTOR (2 times). Returns the updated slice to try again.
    """
    self._current_range = int(max(self._current_range / self.RANGE_REDUCE_FACTOR, self.INITIAL_RANGE_DAYS))
    start_date = self._prev_start_date
    end_date = min(self._end_date, start_date + pendulum.Duration(days=self._current_range))
    self._start_date = end_date
    return StreamSlice(start_date=start_date, end_date=end_date)
def test_get_user_from_registration_token(
    self, app, user_class, db, default_guard,
):
    """
    This test verifies that a user can be extracted from an email based
    registration token. Also verifies that a token that has expired cannot be
    used to fetch a user. Also verifies that a registration token may not be
    refreshed.
    """
    # create our default test user
    the_dude = user_class(
        username="******",
        email="*****@*****.**",
        password=default_guard.hash_password("abides"),
    )
    db.session.add(the_dude)
    db.session.commit()

    reg_token = default_guard.encode_jwt_token(
        the_dude,
        bypass_user_check=True,
        is_registration_token=True,
    )
    extracted_user = default_guard.get_user_from_registration_token(reg_token)
    assert extracted_user == the_dude

    """
    test to ensure a registration token that is expired
    sets off an 'ExpiredAccessError' exception
    """
    with plummet.frozen_time('2019-01-30 16:30:00'):
        expired_reg_token = default_guard.encode_jwt_token(
            the_dude,
            bypass_user_check=True,
            override_access_lifespan=pendulum.Duration(minutes=1),
            is_registration_token=True,
        )

    with plummet.frozen_time('2019-01-30 16:40:00'):
        with pytest.raises(ExpiredAccessError):
            default_guard.get_user_from_registration_token(expired_reg_token)
def shift_csv_file(filename, output, shift='y', ts_field='ts'):
    # First, find the first and last timestamp.
    first_ts = None
    last_ts = None
    with open(filename, mode='r') as file:
        reader = csv.DictReader(file, delimiter=',')
        for row in reader:
            if not first_ts:
                first_ts = row[ts_field]
            last_ts = row[ts_field]
    first_ts = pendulum.parse(first_ts)
    last_ts = pendulum.parse(last_ts)

    # Find the proper shift.
    shifts = {'d': pendulum.Duration(days=1), 'y': pendulum.duration(years=1)}
    mul = 0
    delta = shifts[shift]
    now = pendulum.now()
    while first_ts + (mul * delta) < now and first_ts + ((mul + 1) * delta) < now:
        mul += 1
    # while (now - (first_ts+(mul+1)*delta))
    delta *= mul

    with open(filename, mode='r') as file, open(output, mode='w') as outfile:
        reader = csv.DictReader(file, delimiter=',')
        writer = csv.DictWriter(outfile, fieldnames=reader.fieldnames, dialect='excel')
        writer.writeheader()
        previous_ts = None
        for row in reader:
            ts = pendulum.parse(row[ts_field]) + delta
            row[ts_field] = ts.format(DATE_FORMAT)
            if previous_ts != ts:
                writer.writerow(row)
            else:
                # Throw away duplicate timestamps (e.g. February 28 for year shifts).
                pass
            previous_ts = ts
def __next__(self) -> StreamSlice:
    """
    Generates the next slice based on the previous slice's processing result.
    All the next-slice range calculations should be done after calling the
    adjust_range and reduce_range methods.
    """
    if self._start_date >= self._end_date:
        raise StopIteration()
    if not self._range_adjusted:
        self._current_range = self.MAX_RANGE_DAYS

    next_start_date = min(self._end_date, self._start_date + pendulum.Duration(days=self._current_range))
    slice = StreamSlice(start_date=self._start_date, end_date=next_start_date)
    self._prev_start_date = self._start_date
    self._start_date = next_start_date
    self._range_adjusted = False
    return slice
def test_get_user_from_registration_token(self, app, user_class, db):
    """
    This test verifies that a user can be extracted from an email based
    registration token. Also verifies that a token that has expired cannot be
    used to fetch a user. Also verifies that a registration token may not be
    refreshed.
    """
    app.config['TESTING'] = True
    default_guard = Praetorian(app, user_class)

    # create our default test user
    the_dude = user_class(
        username='******',
        email='*****@*****.**',
        password=default_guard.hash_password('abides'),
    )
    db.session.add(the_dude)
    db.session.commit()

    reg_token = default_guard.encode_jwt_token(
        the_dude,
        bypass_user_check=True,
        is_registration_token=True,
    )
    extracted_user = default_guard.get_user_from_registration_token(reg_token)
    assert extracted_user == the_dude

    """
    test to ensure a registration token that is expired
    sets off an 'ExpiredAccessError' exception
    """
    moment = pendulum.parse('2019-01-30 16:30:00')
    with freezegun.freeze_time(moment):
        expired_reg_token = default_guard.encode_jwt_token(
            the_dude,
            bypass_user_check=True,
            override_access_lifespan=pendulum.Duration(minutes=1),
            is_registration_token=True,
        )

    moment = pendulum.parse('2019-01-30 16:40:00')
    with freezegun.freeze_time(moment):
        with pytest.raises(ExpiredAccessError):
            default_guard.get_user_from_registration_token(expired_reg_token)
def timedelta_human(td: datetime.timedelta) -> str:
    parts = []
    dur = pendulum.Duration(seconds=td.total_seconds())

    if dur.days > 0:
        day_part = f'{dur.days} day'
        if dur.days > 1:
            day_part = f'{day_part}s'
        parts.append(day_part)

    if dur.hours > 0:
        hour_part = f'{dur.hours} hr'
        if dur.hours > 1:
            hour_part = f'{hour_part}s'
        parts.append(hour_part)

    if dur.minutes > 0:
        minute_part = f'{dur.minutes} min'
        if dur.minutes > 1:
            minute_part = f'{minute_part}s'
        parts.append(minute_part)

    return ' '.join(parts)
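# Illustrative calls to timedelta_human above (values are made up), assuming the
# function and pendulum are importable:
import datetime

print(timedelta_human(datetime.timedelta(days=1, hours=3, minutes=5)))  # -> "1 day 3 hrs 5 mins"
print(timedelta_human(datetime.timedelta(minutes=1)))                   # -> "1 min"
print(timedelta_human(datetime.timedelta(seconds=30)))                  # -> "" (seconds are not rendered)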
def articles_fetch(ctx, directory, first):
    """Fetch articles as PDFs"""
    api = RepublikApi(API_URL_REPUBLIK, ctx.obj["TOKEN"])
    directory = pathlib.Path(directory)
    timestamp_path = directory / ".last"

    if not api.get_my_id():
        raise click.BadArgumentUsage(f"Login failed, is the token '{api.token}' still valid?")

    last = None
    try:
        last = typing.cast(pendulum.DateTime, pendulum.parse(timestamp_path.read_text().strip()))
        articles = api.get_articles_since(last + pendulum.Duration(seconds=1))
        if not articles:
            click.echo(f"No new articles published since {last}")
            return
    except (FileNotFoundError, PermissionError):
        articles = api.get_last_articles(first)
        if not articles:
            click.echo("No articles found, something is probably wrong")
            return

    directory.mkdir(parents=True, exist_ok=True)
    cdn = RepublikCDN()
    for article in articles:
        destination = directory / f"{article.publication_date} - {article.title}.pdf"
        click.echo(f"Fetching: {article.publication_date}: {article.title}")
        click.echo(f" -> {destination}")
        cdn.download_pdf(article.path, destination)

    timestamp_path.write_text(articles[0].publication_date.to_rfc3339_string())
def test_email_stream_chunked_encoding_exception(catalog, time_mock):
    TEST_START_DATE = "2020"
    DAYS_DURATION = 100

    time_mock.move_to(pendulum.parse(TEST_START_DATE) + pendulum.Duration(days=DAYS_DURATION))

    responses.add(
        "GET",
        "https://api.iterable.com/api/export/data.json",
        body=ChunkedEncodingError(),
    )

    with pytest.raises(Exception, match="ChunkedEncodingError: Reached maximum number of retires: 3"):
        read_from_source(catalog)
    assert len(responses.calls) == 3
def main(db_path, debug):
    if debug:
        logger.setLevel(logging.DEBUG)

    db = SqliteDatabase(path=db_path)
    end_date = pendulum.now()
    step = pendulum.Duration(minutes=1000)

    symbols = get_symbols()
    logging.info(f'Found {len(symbols)} symbols')
    for i, symbol in enumerate(symbols, 1):
        # Get the start date for the symbol. This is either the last entry from
        # the db or the trading start date (from the json file).
        latest_candle_date = db.get_latest_candle_date(symbol)
        if latest_candle_date is None:
            logging.debug('No previous entries in db. Starting from scratch')
            # TODO: handle case when symbol is missing from trading start days
            # e.g. symbol is in symbols.json but not in symbols_trading_start_days.json
            start_date = symbol_start_date(symbol)
        else:
            logging.debug('Found previous db entries. Resuming from latest')
            start_date = latest_candle_date

        logging.info(f'{i}/{len(symbols)} | {symbol} | Processing from {start_date.to_datetime_string()}')
        for d1, d2 in date_range(start_date, end_date, step):
            logging.debug(f'{d1} -> {d2}')
            # Returns (at most) 1000 candles, one for every minute.
            candles = get_candles(symbol, d1, d2)
            logging.debug(f'Fetched {len(candles)} candles')
            if candles:
                db.insert_candles(symbol, candles)
            # Prevent api rate-limiting.
            time.sleep(3)

    db.close()
def test_date_range():
    """
    Test that the iterator yields the correct end-date.
    """
    start_date = pendulum.create(2015, 5, 12)
    end_date = pendulum.create(2015, 5, 13, 15, 0)
    d = pendulum.Duration(minutes=1000)
    ranges = [(d1, d2) for d1, d2 in date_range(start_date, end_date, d)]

    assert len(ranges) == 3
    assert ranges[0][0] == start_date
    assert ranges[0][1] == pendulum.create(2015, 5, 12, 16, 40)

    # the start date of the next element should be the end
    # date of the first one
    assert ranges[1][0] == pendulum.create(2015, 5, 12, 16, 40)
    assert ranges[1][1] == pendulum.create(2015, 5, 13, 9, 20)

    # the last element shouldn't be later than the end date
    assert ranges[2][0] == pendulum.create(2015, 5, 13, 9, 20)
    assert ranges[2][1] == end_date
def parse_reldt(s):
    """
    s takes the form: datetime str [+-] duration str
    """
    msg = []
    plus = r'\s[+]\s'
    minus = r'\s[-]\s'
    sign = ''
    if re.search(plus, s):
        sign = '+'
    elif re.search(minus, s):
        sign = '-'
    logger.debug(f"s: {s}; sign: {sign}")

    if sign:
        if s[0] in ['+', '-']:
            dtm = ''
            dur = s[1:]
        else:
            parts = [x.strip() for x in re.split(r'[+-]\s', s)]
            dtm = parts[0]
            dur = f"{sign}{parts[1]}" if len(parts) > 1 else ''
    else:
        dtm = s.strip()
        dur = ''
    logger.debug(f"dtm: {dtm}; dur: {dur}")

    if dtm in date_shortcuts:
        dt = date_shortcuts[dtm]()
    else:
        dt = pendulum.parse(dtm, strict=False, tz='local')
    logger.debug(f"dt: {dt}")

    if dur:
        ok, du = parse_duration(dur)
    else:
        du = pendulum.Duration()
    logger.debug(f"dt: {dt}, du: {du}, dt+du: {dt+du}")
    return dt + du
def test_refresh_jwt_token(
    self, app, db, user_class, validating_user_class,
):
    """
    This test::
        * verifies that the refresh_jwt_token properly generates a refreshed
          jwt token.
        * ensures that a token whose access permission has not expired may
          not be refreshed.
        * ensures that a token whose access permission has expired must not
          have an expired refresh permission for a new token to be issued.
        * ensures that if an override_access_lifespan argument is supplied
          that it is used instead of the instance's access_lifespan.
        * ensures that the access_lifespan may not exceed the refresh
          lifespan.
        * ensures that if the user_class has the instance method validate(),
          it is called and any exceptions it raises are wrapped in an
          InvalidUserError.
        * verifies that if a user is no longer identifiable that a
          MissingUserError is raised.
        * verifies that any custom claims in the original token's payload are
          also packaged in the new token's payload.
    """
    guard = Praetorian(app, user_class)
    the_dude = user_class(
        username='******',
        password=guard.encrypt_password('abides'),
        roles='admin,operator',
    )
    db.session.add(the_dude)
    db.session.commit()

    moment = pendulum.parse('2017-05-21 18:39:55')
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(the_dude)
    new_moment = (
        pendulum.parse('2017-05-21 18:39:55')
        + pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        + pendulum.Duration(minutes=1)
    )
    with freezegun.freeze_time(new_moment):
        new_token = guard.refresh_jwt_token(token)
        new_token_data = jwt.decode(
            new_token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert new_token_data['iat'] == new_moment.int_timestamp
        assert new_token_data['exp'] == (
            new_moment + pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        ).int_timestamp
        assert new_token_data['rf_exp'] == (
            moment + pendulum.Duration(**DEFAULT_JWT_REFRESH_LIFESPAN)
        ).int_timestamp
        assert new_token_data['id'] == the_dude.id
        assert new_token_data['rls'] == 'admin,operator'

    moment = pendulum.parse('2017-05-21 18:39:55')
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(the_dude)
    new_moment = (
        pendulum.parse('2017-05-21 18:39:55')
        + pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        + pendulum.Duration(minutes=1)
    )
    with freezegun.freeze_time(new_moment):
        new_token = guard.refresh_jwt_token(
            token,
            override_access_lifespan=pendulum.Duration(hours=2),
        )
        new_token_data = jwt.decode(
            new_token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert new_token_data['exp'] == (
            new_moment + pendulum.Duration(hours=2)
        ).int_timestamp

    moment = pendulum.parse('2017-05-21 18:39:55')
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(
            the_dude,
            override_refresh_lifespan=pendulum.Duration(hours=2),
            override_access_lifespan=pendulum.Duration(minutes=30),
        )
    new_moment = moment + pendulum.Duration(minutes=31)
    with freezegun.freeze_time(new_moment):
        new_token = guard.refresh_jwt_token(
            token,
            override_access_lifespan=pendulum.Duration(hours=2),
        )
        new_token_data = jwt.decode(
            new_token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert new_token_data['exp'] == new_token_data['rf_exp']

    expiring_interval = (
        pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        + pendulum.Duration(minutes=1)
    )
    validating_guard = Praetorian(app, validating_user_class)
    brandt = validating_user_class(
        username='******',
        password=guard.encrypt_password("can't watch"),
        is_active=True,
    )
    db.session.add(brandt)
    db.session.commit()
    moment = pendulum.parse('2017-05-21 18:39:55')
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(brandt)
    new_moment = moment + expiring_interval
    with freezegun.freeze_time(new_moment):
        validating_guard.refresh_jwt_token(token)
    brandt.is_active = False
    db.session.merge(brandt)
    db.session.commit()
    new_moment = new_moment + expiring_interval
    with freezegun.freeze_time(new_moment):
        with pytest.raises(InvalidUserError) as err_info:
            validating_guard.refresh_jwt_token(token)
    expected_message = 'The user is not valid or has had access revoked'
    assert expected_message in str(err_info.value)

    expiring_interval = (
        pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        + pendulum.Duration(minutes=1)
    )
    guard = Praetorian(app, user_class)
    bunny = user_class(
        username='******',
        password=guard.encrypt_password("can't blow that far"),
    )
    db.session.add(bunny)
    db.session.commit()
    moment = pendulum.parse('2017-05-21 18:39:55')
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(bunny)
    db.session.delete(bunny)
    db.session.commit()
    new_moment = moment + expiring_interval
    with freezegun.freeze_time(new_moment):
        with pytest.raises(MissingUserError) as err_info:
            validating_guard.refresh_jwt_token(token)
    expected_message = 'Could not find the requested user'
    assert expected_message in str(err_info.value)

    moment = pendulum.parse('2018-08-14 09:05:24')
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(
            the_dude,
            duder='brief',
            el_duderino='not brief',
        )
    new_moment = (
        pendulum.parse('2018-08-14 09:05:24')
        + pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        + pendulum.Duration(minutes=1)
    )
    with freezegun.freeze_time(new_moment):
        new_token = guard.refresh_jwt_token(token)
        new_token_data = jwt.decode(
            new_token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert new_token_data['iat'] == new_moment.int_timestamp
        assert new_token_data['exp'] == (
            new_moment + pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        ).int_timestamp
        assert new_token_data['rf_exp'] == (
            moment + pendulum.Duration(**DEFAULT_JWT_REFRESH_LIFESPAN)
        ).int_timestamp
        assert new_token_data['id'] == the_dude.id
        assert new_token_data['rls'] == 'admin,operator'
        assert new_token_data['duder'] == 'brief'
        assert new_token_data['el_duderino'] == 'not brief'
def test_encode_jwt_token(self, app, user_class, validating_user_class):
    """
    This test::
        * verifies that the encode_jwt_token correctly encodes jwt data based
          on a user instance.
        * verifies that if a user specifies an override for the access
          lifespan it is used in lieu of the instance's access_lifespan.
        * verifies that the access_lifespan cannot exceed the refresh
          lifespan.
        * ensures that if the user_class has the instance method validate(),
          it is called and any exceptions it raises are wrapped in an
          InvalidUserError.
        * verifies that custom claims may be encoded in the token and
          validates that the custom claims do not collide with reserved
          claims.
    """
    guard = Praetorian(app, user_class)
    the_dude = user_class(
        username='******',
        password=guard.encrypt_password('abides'),
        roles='admin,operator',
    )

    moment = pendulum.parse('2017-05-21 18:39:55')
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(the_dude)
        token_data = jwt.decode(
            token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert token_data['iat'] == moment.int_timestamp
        assert token_data['exp'] == (
            moment + pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        ).int_timestamp
        assert token_data['rf_exp'] == (
            moment + pendulum.Duration(**DEFAULT_JWT_REFRESH_LIFESPAN)
        ).int_timestamp
        assert token_data['id'] == the_dude.id
        assert token_data['rls'] == 'admin,operator'

    moment = pendulum.parse('2017-05-21 18:39:55')
    override_access_lifespan = pendulum.Duration(minutes=1)
    override_refresh_lifespan = pendulum.Duration(hours=1)
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(
            the_dude,
            override_access_lifespan=override_access_lifespan,
            override_refresh_lifespan=override_refresh_lifespan,
        )
        token_data = jwt.decode(
            token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert token_data['iat'] == moment.int_timestamp
        assert token_data['exp'] == (
            moment + override_access_lifespan
        ).int_timestamp
        assert token_data['rf_exp'] == (
            moment + override_refresh_lifespan
        ).int_timestamp
        assert token_data['id'] == the_dude.id
        assert token_data['rls'] == 'admin,operator'

    moment = pendulum.parse('2017-05-21 18:39:55')
    override_access_lifespan = pendulum.Duration(hours=1)
    override_refresh_lifespan = pendulum.Duration(minutes=1)
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(
            the_dude,
            override_access_lifespan=override_access_lifespan,
            override_refresh_lifespan=override_refresh_lifespan,
        )
        token_data = jwt.decode(
            token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert token_data['iat'] == moment.int_timestamp
        assert token_data['exp'] == token_data['rf_exp']
        assert token_data['rf_exp'] == (
            moment + override_refresh_lifespan
        ).int_timestamp
        assert token_data['id'] == the_dude.id
        assert token_data['rls'] == 'admin,operator'

    validating_guard = Praetorian(app, validating_user_class)
    brandt = validating_user_class(
        username='******',
        password=guard.encrypt_password("can't watch"),
        is_active=True,
    )
    validating_guard.encode_jwt_token(brandt)
    brandt.is_active = False
    with pytest.raises(InvalidUserError) as err_info:
        validating_guard.encode_jwt_token(brandt)
    expected_message = 'The user is not valid or has had access revoked'
    assert expected_message in str(err_info.value)

    moment = pendulum.parse('2018-08-18 08:55:12')
    with freezegun.freeze_time(moment):
        token = guard.encode_jwt_token(
            the_dude,
            duder='brief',
            el_duderino='not brief',
        )
        token_data = jwt.decode(
            token,
            guard.encode_key,
            algorithms=guard.allowed_algorithms,
        )
        assert token_data['iat'] == moment.int_timestamp
        assert token_data['exp'] == (
            moment + pendulum.Duration(**DEFAULT_JWT_ACCESS_LIFESPAN)
        ).int_timestamp
        assert token_data['rf_exp'] == (
            moment + pendulum.Duration(**DEFAULT_JWT_REFRESH_LIFESPAN)
        ).int_timestamp
        assert token_data['id'] == the_dude.id
        assert token_data['rls'] == 'admin,operator'
        assert token_data['duder'] == 'brief'
        assert token_data['el_duderino'] == 'not brief'

    with pytest.raises(ClaimCollisionError) as err_info:
        guard.encode_jwt_token(the_dude, exp='nice marmot')
    expected_message = 'custom claims collide'
    assert expected_message in str(err_info.value)
import os
import re
import argparse
import pygrib
import shutil
import pendulum
from glob import glob
import sys

sys.path.append(f'{os.path.dirname(os.path.realpath(__file__))}/../utils')
from utils import cli, ftp_exist, ftp_list, ftp_get

if not os.getenv('CIMISS_FTP_HOST'):
    cli.error('CIMISS_FTP_HOST is not set!')
if not os.getenv('CIMISS_FTP_USER'):
    cli.error('CIMISS_FTP_USER is not set!')
if not os.getenv('CIMISS_FTP_PASSWD'):
    cli.error('CIMISS_FTP_PASSWD is not set!')

time_interval = pendulum.Duration(hours=6)


def parse_datetime(string):
    match = re.match(r'(\d{4}\d{2}\d{2}\d{2})(\d{2})?', string)
    if match.group(2):
        return pendulum.from_format(string, 'YYYYMMDDHHmm')
    else:
        return pendulum.from_format(string, 'YYYYMMDDHH')


def parse_datetime_range(string):
    match = re.match(r'(\d{4}\d{2}\d{2}\d{2})-(\d{4}\d{2}\d{2}\d{2})', string)
    if not match:
        # ArgumentTypeError takes a single message and is the error argparse expects
        # from a type= converter (ArgumentError requires an argument object as well).
        raise argparse.ArgumentTypeError('"' + string + '" is not a datetime range (YYYYMMDDHH)!')
    return (
        pendulum.from_format(match.group(1), 'YYYYMMDDHH'),
        pendulum.from_format(match.group(2), 'YYYYMMDDHH'),
    )


def parse_forecast(string):
import logging
import subprocess
from subprocess import PIPE
import json
from tempfile import NamedTemporaryFile
import uuid

import pendulum

from djali.couchdb import CloudiControl
from quasimodo.amqp import QueueWorkerSkeleton
from jinja2 import Environment, BaseLoader

from esmeralda.defaults import ESMERALDA_CONFIG

#: default *time out* duration
TIME_OUT_DEFAULT = pendulum.Duration(hours=1)

#: single host inventory file template
INVENTORY_TEMPLATE = """[{{ group_name }}]
{{ inventory_hostname }}
"""


def setup_ansible_environment(**kwargs):
    """
    Set ansible environment variables for running a playbook.

    Values for omitted keyword arguments will fall back to their corresponding
    :py:data:`esmeralda.defaults.ESMERALDA_CONFIG` key/value pair.

    Keyword Args:
        ansible_root_path(str, optional): Path of playbooks and roles
    'sha512_crypt',
    'bcrypt',
    'argon2',
    'bcrypt_sha256',
]
DEFAULT_HASH_DEPRECATED_SCHEMES = []

REFRESH_EXPIRATION_CLAIM = 'rf_exp'
IS_REGISTRATION_TOKEN_CLAIM = 'is_ert'
IS_RESET_TOKEN_CLAIM = 'is_prt'
RESERVED_CLAIMS = {
    'iat',
    'exp',
    'jti',
    'id',
    'rls',
    REFRESH_EXPIRATION_CLAIM,
    IS_REGISTRATION_TOKEN_CLAIM,
    IS_RESET_TOKEN_CLAIM,
}

# 1M days seems reasonable. If this code is being used in 3000 years...welp
VITAM_AETERNUM = pendulum.Duration(days=1000000)


class AccessType(enum.Enum):
    access = 'ACCESS'
    refresh = 'REFRESH'
    register = 'REGISTER'
    reset = 'RESET'