def make_job_app(config_file):
    """
    Context-manager to make a Flask app and RQ Queue for running the
    RQ scheduler and workers.

    Parameters
    ----------
    config_file : file-path
        Path to a python configuration file. Important parameters include
        the Redis connection keys, the Auth0 connection parameters, and
        the MySQL connection parameters.

    Yields
    ------
    app
        The Flask app, which has a running context for access to
        flask.current_app
    queue
        The RQ queue, using app.config['SCHEDULER_QUEUE'], with an
        active Redis connection
    """
    flask_app = Flask('scheduled_jobs')
    flask_app.config.from_pyfile(config_file)
    # Push an app context so get_queue (and anything else touching
    # flask.current_app) works while the caller holds the app/queue pair.
    with flask_app.app_context():
        job_queue = get_queue(flask_app.config['SCHEDULER_QUEUE'])
        yield flask_app, job_queue
def test_get_queue_fake(mocker, app, name):
    # With USE_FAKE_REDIS enabled, get_queue must back the queue with
    # a fakeredis connection rather than a real one.
    fake_redis = mocker.patch('fakeredis.FakeStrictRedis')
    with app.app_context():
        current_app.config['USE_FAKE_REDIS'] = True
        result = queuing.get_queue(name)
    assert isinstance(result, Queue)
    assert result.name == name
    assert fake_redis.called
def test_get_queue_real(mocker, app, name):
    # With USE_FAKE_REDIS disabled, get_queue must go through
    # make_redis_connection for a real Redis connection.
    real_conn = mocker.patch.object(queuing, 'make_redis_connection')
    with app.app_context():
        current_app.config['USE_FAKE_REDIS'] = False
        result = queuing.get_queue(name)
    assert isinstance(result, Queue)
    assert result.name == name
    assert real_conn.called
def enqueue_report(report_id, base_url):
    """Enqueue a report-computation job on the 'reports' RQ queue.

    If the app config sets JOB_BASE_URL, it overrides the caller-supplied
    base_url so workers call back to the right host.
    """
    override_url = current_app.config.get('JOB_BASE_URL')
    effective_url = base_url if override_url is None else override_url
    reports_queue = get_queue('reports')
    reports_queue.enqueue(
        compute_report,
        HiddenToken(current_access_token),
        report_id,
        base_url=effective_url,
        result_ttl=0,
        job_timeout=current_app.config['REPORT_JOB_TIMEOUT'])
def queue(app):
    # Queue bound to the app-configured scheduler queue name.
    queue_name = app.config['SCHEDULER_QUEUE']
    return get_queue(queue_name)
def post(self, observation_id, *args):
    """
    ---
    summary: Add Observation data.
    description: |
      Add new timeseries values to the Observation entry.
      Float values *will be rounded* to 8 decimal places before
      storage.
    tags:
    - Observations
    parameters:
    - observation_id
    requestBody:
      required: True
      content:
        application/json:
          schema:
            $ref: '#/components/schemas/ObservationValuesPost'
        text/csv:
          schema:
            type: string
            description: |
              Text file with fields separated by ',' and
              lines separated by '\\n'. The first line must
              be a header with the following fields:
              timestamp, value, quality_flag. Timestamp must be
              an ISO 8601 datetime, value may be an integer or float,
              quality_flag may be 0 or 1 (indicating the value is not
              to be trusted). '#' is parsed as a comment character.
              Values that will be interpreted as NaN include the
              empty string, -999.0, -9999.0, 'nan', 'NaN', 'NA',
              'N/A', 'n/a', 'null'.
          example: |-
            # comment line
            timestamp,value,quality_flag
            2018-10-29T12:00:00Z,32.93,0
            2018-10-29T13:00:00Z,25.17,0
            2018-10-29T14:00:00Z,,1  # this value is NaN
    responses:
      201:
        $ref: '#/components/responses/201-Created'
      400:
        $ref: '#/components/responses/400-BadRequest'
      401:
        $ref: '#/components/responses/401-Unauthorized'
      404:
        $ref: '#/components/responses/404-NotFound'
      413:
        $ref: '#/components/responses/413-PayloadTooLarge'
    """
    # The 'donotvalidate' query arg skips both the stricter
    # quality_flag range check and the enqueue of the validation job
    # below — presumably used when the validation worker posts its own
    # results back (TODO confirm against the tasks module).
    run_validation = 'donotvalidate' not in request.args
    if run_validation:
        # users should only upload 0 or 1 for quality_flag
        qf_range = [0, 1]
    else:
        # but the validation task will post the quality flag
        # up a 2 byte unsigned int
        qf_range = [0, 2**16 - 1]
    # Parse the request body (JSON or CSV) and validate values and
    # quality flags against qf_range.
    observation_df = validate_observation_values(
        validate_parsable_values(), qf_range)
    observation_df = observation_df.set_index('timestamp')
    storage = get_storage()
    # Metadata lookup is keyed on the first uploaded timestamp; it
    # also yields whether this observation is event data.
    interval_length, previous_time, _, is_event = (
        storage.read_metadata_for_observation_values(
            observation_id, observation_df.index[0]))
    # Ensure uploaded timestamps are consistent with the observation's
    # interval length and follow on from the previously stored time.
    validate_index_period(observation_df.index, interval_length,
                          previous_time)
    if is_event:
        # Event observations get an extra value-domain check.
        validate_event_data(observation_df)
    stored = storage.store_observation_values(observation_id,
                                              observation_df)
    if run_validation:
        # Kick off asynchronous validation over the stored range.
        # JOB_BASE_URL (if set) overrides the request's root URL so the
        # worker calls back to the correct host.
        q = get_queue()
        q.enqueue(tasks.fetch_and_validate_observation,
                  HiddenToken(current_access_token),
                  observation_id,
                  observation_df.index[0].isoformat(),
                  observation_df.index[-1].isoformat(),
                  base_url=(current_app.config['JOB_BASE_URL'] or
                            request.url_root.rstrip('/')),
                  result_ttl=0,
                  job_timeout=current_app.config['VALIDATION_JOB_TIMEOUT'])
    return stored, 201