def get(self):
    # List clubs/leagues with the years for which rowing data exists.
    # Entries are included only when the key is a configured league
    # (keys stored lower-case, config upper-case) or 'euskotren'.
    args = parser.parse_args()
    all_years = YearsDAO.get_years_from_db()
    result = []
    for k, v in all_years.items():
        if k.upper() in app.config.LEAGUES or k == 'euskotren':
            if args['historial'] and inputs.boolean(args['historial']):
                # NOTE(review): nesting reconstructed from a collapsed
                # one-liner; the 'else' below is assumed to pair with the
                # 'historial' check (historial mode filters to years after
                # 2009, otherwise everything is returned) — confirm against
                # the original file.
                if args['year'] > 2009:
                    result.append({'name': k, 'years': v})
            else:
                result.append({'name': k, 'years': v})
    return result
def test_non_strings(self):
    """Non-string values are coerced: falsy objects -> False, 1 -> True."""
    for raw, expected in ((0, False), (1, True), ([], False)):
        assert inputs.boolean(raw) is expected
def test_bad_boolean(self):
    """Unparseable strings and None must raise ValueError."""
    for bad in ('blah', None):
        with pytest.raises(ValueError):
            inputs.boolean(bad)
def test_checkbox(self):
    """The HTML checkbox value 'on' is accepted as True."""
    parsed = inputs.boolean('on')
    assert parsed is True
def test_case(self):
    """Parsing is case-insensitive for both falsy and truthy spellings."""
    # Bug: the original asserted the identical mixed-case 'FaLSE' twice,
    # so the truthy side of case-insensitive parsing was never exercised.
    assert inputs.boolean('FaLSE') is False
    assert inputs.boolean('TrUe') is True
def test_python_bool(self):
    """Actual bool instances pass through unchanged."""
    for flag in (True, False):
        assert inputs.boolean(flag) is flag
def test_true(self):
    """The literal string 'true' parses as True."""
    parsed = inputs.boolean('true')
    assert parsed is True
def test_false(self):
    """The literal string "False" parses as False."""
    parsed = inputs.boolean("False")
    assert parsed is False
def test_false(self):
    """The literal string 'False' parses as False."""
    parsed = inputs.boolean('False')
    assert parsed is False
def test_0(self):
    """The string '0' must map to the boolean False."""
    parsed = inputs.boolean('0')
    assert parsed is False
# Query-string parser for the optional "sandbox" flag on LiqPay endpoints.
from flask_restx import reqparse, inputs
from environment_settings import LIQPAY_SANDBOX_BY_DEFAULT_ENABLED

parser_query = reqparse.RequestParser()
parser_query.add_argument(
    "sandbox",
    type=inputs.boolean,
    # Default comes from the environment setting; it is passed through the
    # same boolean parser so env spellings like "true"/"0" behave exactly
    # like values supplied in the query string.
    default=inputs.boolean(LIQPAY_SANDBOX_BY_DEFAULT_ENABLED))
def test_checkbox(self):
    """Checkbox-style "on" must map to the boolean True."""
    result = inputs.boolean("on")
    assert result is True
def test_true(self):
    """The string "true" must map to the boolean True."""
    result = inputs.boolean("true")
    assert result is True
def post(self):
    """create new user rule

    Validates the request payload, checks for name collisions and that the
    referenced hysds_io exists, then indexes a new user-rule document in
    Elasticsearch. Returns (dict, status) tuples on error; a success dict
    on creation.
    """
    user_rules_index = app.config['USER_RULES_INDEX']
    # Accept either a JSON body or form-encoded data.
    request_data = request.json or request.form
    rule_name = request_data.get('rule_name')
    hysds_io = request_data.get('workflow')
    job_spec = request_data.get('job_spec')
    priority = int(request_data.get('priority', 0))
    query_string = request_data.get('query_string')
    kwargs = request_data.get('kwargs', '{}')
    queue = request_data.get('queue')
    tags = request_data.get('tags', [])
    time_limit = request_data.get('time_limit', None)
    soft_time_limit = request_data.get('soft_time_limit', None)
    disk_usage = request_data.get('disk_usage', None)
    enable_dedup = request_data.get('enable_dedup')
    if enable_dedup is not None:
        try:
            # Coerce truthy/falsy string spellings ("true", "0", "on", ...)
            # to a real bool; reject anything unparseable with a 400.
            enable_dedup = inputs.boolean(enable_dedup)
        except ValueError as e:
            return {'success': False, 'message': str(e)}, 400

    username = "******"  # TODO: add user role and permissions, hard coded to "ops" for now

    # Required-field validation: report every missing parameter at once.
    if not rule_name or not hysds_io or not job_spec or not query_string or not queue:
        missing_params = []
        if not rule_name:
            missing_params.append('rule_name')
        if not hysds_io:
            missing_params.append('workflow')
        if not job_spec:
            missing_params.append('job_spec')
        if not query_string:
            missing_params.append('query_string')
        if not queue:
            missing_params.append('queue')
        return {
            'success': False,
            'message': 'Params not specified: %s' % ', '.join(missing_params),
            'result': None,
        }, 400

    if len(rule_name) > 64:
        return {
            "success": False,
            "message": "rule_name needs to be less than 64 characters",
            "result": None,
        }, 400

    # Both the ES query and kwargs must at least be well-formed JSON.
    try:
        json.loads(query_string)
    except (ValueError, TypeError) as e:
        app.logger.error(e)
        return {
            'success': False,
            'message': 'invalid elasticsearch query JSON'
        }, 400

    try:
        json.loads(kwargs)
    except (ValueError, TypeError) as e:
        app.logger.error(e)
        return {'success': False, 'message': 'invalid JSON: kwargs'}, 400

    # check if rule name already exists
    rule_exists_query = {"query": {"term": {"rule_name": rule_name}}}
    existing_rules_count = mozart_es.get_count(index=user_rules_index, body=rule_exists_query)
    if existing_rules_count > 0:
        return {
            'success': False,
            'message': 'user rule already exists: %s' % rule_name
        }, 409

    # check if job_type (hysds_io) exists in elasticsearch
    job_type = mozart_es.get_by_id(index=HYSDS_IOS_INDEX, id=hysds_io, ignore=404)
    if job_type['found'] is False:
        return {
            'success': False,
            'message': '%s not found' % hysds_io
        }, 400

    params = job_type['_source']['params']
    is_passthrough_query = check_passthrough_query(params)

    # Normalize a single tag string to a one-element list.
    if type(tags) == str:
        tags = [tags]

    now = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
    new_doc = {
        "workflow": hysds_io,
        "job_spec": job_spec,
        "priority": priority,
        "rule_name": rule_name,
        "username": username,
        "query_string": query_string,
        "kwargs": kwargs,
        "job_type": hysds_io,
        "enabled": True,
        "passthru_query": is_passthrough_query,
        "query_all": False,
        "queue": queue,
        "modified_time": now,
        "creation_time": now,
        "tags": tags
    }

    # NOTE(review): non-int time limits are silently dropped (the outer
    # "and isinstance" guard skips them) — confirm that is intentional.
    if time_limit and isinstance(time_limit, int):
        if time_limit <= 0 or time_limit > 86400 * 7:
            return {
                'success': False,
                'message': 'time_limit must be between 0 and 604800 (sec)'
            }, 400
        else:
            new_doc['time_limit'] = time_limit

    if soft_time_limit and isinstance(soft_time_limit, int):
        if soft_time_limit <= 0 or soft_time_limit > 86400 * 7:
            return {
                'success': False,
                'message': 'soft_time_limit must be between 0 and 604800 (sec)'
            }, 400
        else:
            new_doc['soft_time_limit'] = soft_time_limit

    if disk_usage:
        new_doc['disk_usage'] = disk_usage
    if enable_dedup is not None:
        new_doc['enable_dedup'] = enable_dedup

    result = mozart_es.index_document(index=user_rules_index, body=new_doc, refresh=True)
    return {'success': True, 'message': 'rule created', 'result': result}
def test_1(self):
    """The string '1' must map to the boolean True."""
    result = inputs.boolean('1')
    assert result is True
def put(self):
    """edit existing user rule

    Locates the rule by "id" or "rule_name", validates any fields supplied
    in the request, and applies a partial update (upsert) to the rule
    document in Elasticsearch.
    """
    request_data = request.json or request.form
    _id = request_data.get("id", None)
    _rule_name = request_data.get("rule_name", None)
    if not _id and not _rule_name:
        return {
            "success": False,
            "message": "Must specify id or rule_name in the request"
        }, 400

    user_rules_index = app.config['USER_RULES_INDEX']

    # All fields are optional here; only supplied ones end up in update_doc.
    rule_name = request_data.get('rule_name')
    hysds_io = request_data.get('workflow')
    job_spec = request_data.get('job_spec')
    priority = request_data.get('priority')
    query_string = request_data.get('query_string')
    kwargs = request_data.get('kwargs')
    queue = request_data.get('queue')
    enabled = request_data.get('enabled')
    tags = request_data.get('tags')
    time_limit = request_data.get('time_limit', None)
    soft_time_limit = request_data.get('soft_time_limit', None)
    disk_usage = request_data.get('disk_usage', None)
    enable_dedup = request_data.get('enable_dedup')
    if enable_dedup is not None:
        try:
            # Coerce string spellings to a real bool; 400 on junk.
            enable_dedup = inputs.boolean(enable_dedup)
        except ValueError as e:
            return {'success': False, 'message': str(e)}, 400

    # check if job_type (hysds_io) exists in elasticsearch (only if we're updating job_type)
    if hysds_io:
        job_type = mozart_es.get_by_id(index=HYSDS_IOS_INDEX, id=hysds_io, ignore=404)
        if job_type.get("found", False) is False:
            return {
                'success': False,
                'message': 'job_type not found: %s' % hysds_io
            }, 404

    # Resolve the target document id: direct lookup by id, or search by name.
    if _id:
        app.logger.info('finding existing user rule: %s' % _id)
        existing_rule = mozart_es.get_by_id(index=user_rules_index, id=_id, ignore=404)
        if existing_rule.get("found", False) is False:
            app.logger.info('rule not found %s' % _id)
            return {
                'result': False,
                'message': 'user rule not found: %s' % _id
            }, 404
    elif _rule_name:
        app.logger.info('finding existing user rule: %s' % _rule_name)
        result = mozart_es.search(index=user_rules_index, q="rule_name:{}".format(_rule_name), ignore=404)
        if result.get("hits", {}).get("total", {}).get("value", 0) == 0:
            return {
                'success': False,
                'message': 'rule %s not found' % _rule_name
            }, 404
        else:
            # Use the first hit's id for the subsequent update.
            _id = result.get("hits").get("hits")[0].get("_id")

    update_doc = {}
    if rule_name:
        if len(rule_name) > 64:
            return {
                "success": False,
                "message": "rule_name needs to be less than 64 characters",
                "result": None,
            }, 400
        update_doc['rule_name'] = rule_name
    if hysds_io:
        # The workflow id doubles as the job_type field.
        update_doc['workflow'] = hysds_io
        update_doc['job_type'] = hysds_io
    if job_spec:
        update_doc['job_spec'] = job_spec
    if priority:
        update_doc['priority'] = int(priority)
    if query_string:
        update_doc['query_string'] = query_string
        try:
            json.loads(query_string)
        except (ValueError, TypeError) as e:
            app.logger.error(e)
            return {
                'success': False,
                'message': 'invalid elasticsearch query JSON'
            }, 400
    if kwargs:
        update_doc['kwargs'] = kwargs
        try:
            json.loads(kwargs)
        except (ValueError, TypeError) as e:
            app.logger.error(e)
            return {
                'success': False,
                'message': 'invalid JSON: kwargs'
            }, 400
    if queue:
        update_doc['queue'] = queue
    if enabled is not None:
        # String values: only the exact (case-insensitive) "false" disables;
        # any other string enables. Non-strings are stored as-is.
        if isinstance(enabled, str):
            if enabled.lower() == "false":
                value = False
            else:
                value = True
            update_doc["enabled"] = value
        else:
            update_doc["enabled"] = enabled
    if tags is not None:
        if type(tags) == str:
            tags = [tags]
        update_doc['tags'] = tags
    update_doc['modified_time'] = datetime.utcnow().strftime(
        '%Y-%m-%dT%H:%M:%SZ')

    if 'time_limit' in request_data:  # if submitted in editor
        if time_limit is None:
            # Explicit null clears the limit.
            update_doc['time_limit'] = None
        else:
            if isinstance(time_limit, int) and 0 < time_limit <= 86400 * 7:
                update_doc['time_limit'] = time_limit
            else:
                return {
                    'success': False,
                    'message': 'time_limit must be between 0 and 604800 (sec)'
                }, 400

    if 'soft_time_limit' in request_data:  # if submitted in editor
        if soft_time_limit is None:
            update_doc['soft_time_limit'] = None
        else:
            if isinstance(soft_time_limit, int) and 0 < soft_time_limit <= 86400 * 7:
                update_doc['soft_time_limit'] = soft_time_limit
            else:
                # NOTE(review): message says 'time_limit' but this is the
                # soft_time_limit branch — looks like a copy-paste slip.
                return {
                    'success': False,
                    'message': 'time_limit must be between 0 and 604800 (sec)'
                }, 400

    if 'disk_usage' in request_data:
        update_doc['disk_usage'] = disk_usage
    if 'enable_dedup' in request_data:
        update_doc['enable_dedup'] = enable_dedup

    app.logger.info('editing document id %s in user_rule index' % _id)
    # doc_as_upsert: create the document if it does not already exist.
    doc = {'doc_as_upsert': True, 'doc': update_doc}
    result = mozart_es.update_document(index=user_rules_index, id=_id, body=doc, refresh=True)
    app.logger.info(result)
    app.logger.info('document updated: %s' % _id)
    return {'success': True, 'id': _id, 'updated': update_doc}
def post(self):
    """
    submits on demand job
    :return: submit job id

    Builds a rule dict from the request, validates it against the hysds_io
    registry, and dispatches a job_iterator task to the on-demand Celery
    queue.
    """
    request_data = request.json
    if not request_data:
        # Fall back to form-encoded data when no JSON body is present.
        request_data = request.form

    tag = request_data.get('tags')
    job_type = request_data.get('job_type')
    hysds_io = request_data.get('hysds_io')
    queue = request_data.get('queue')
    priority = int(request_data.get('priority', 0))
    query_string = request_data.get('query')
    kwargs = request_data.get('kwargs', '{}')
    time_limit = request_data.get('time_limit')
    soft_time_limit = request_data.get('soft_time_limit')
    disk_usage = request_data.get('disk_usage')
    enable_dedup = request_data.get('enable_dedup')
    if enable_dedup is not None:
        try:
            # Coerce string spellings ("true", "0", ...) to a real bool.
            enable_dedup = inputs.boolean(enable_dedup)
        except ValueError as e:
            return {
                'success': False,
                'message': str(e)
            }, 400

    # Round-trip through json to validate and normalize the query.
    try:
        query = json.loads(query_string)
        query_string = json.dumps(query)
    except (ValueError, TypeError, Exception) as e:
        app.logger.error(e)
        return {
            'success': False,
            'message': 'invalid JSON query'
        }, 400

    if tag is None or job_type is None or hysds_io is None or queue is None or query_string is None:
        return {
            'success': False,
            'message': 'missing field: [tags, job_type, hysds_io, queue, query]'
        }, 400

    # The hysds_io must be registered before jobs can be submitted for it.
    doc = mozart_es.get_by_id(index=HYSDS_IOS_INDEX, id=hysds_io, ignore=404)
    if doc['found'] is False:
        app.logger.error('failed to fetch %s, not found in hysds_ios' % hysds_io)
        return {
            'success': False,
            'message': '%s not found' % hysds_io
        }, 404

    params = doc['_source']['params']
    is_passthrough_query = check_passthrough_query(params)

    rule = {
        'username': '******',
        'workflow': hysds_io,
        'priority': priority,
        'enabled': True,
        'job_type': job_type,
        'rule_name': tag,
        'kwargs': kwargs,
        'query_string': query_string,
        'query': query,
        'passthru_query': is_passthrough_query,
        'query_all': False,
        'queue': queue
    }

    # NOTE(review): non-int time limits are silently ignored by the outer
    # "and isinstance" guard — confirm that is intentional.
    if time_limit and isinstance(time_limit, int):
        if time_limit <= 0 or time_limit > 86400 * 7:
            return {
                'success': False,
                'message': 'time_limit must be between 0 and 604800 (sec)'
            }, 400
        else:
            rule['time_limit'] = time_limit

    if soft_time_limit and isinstance(soft_time_limit, int):
        if soft_time_limit <= 0 or soft_time_limit > 86400 * 7:
            return {
                'success': False,
                'message': 'soft_time_limit must be between 0 and 604800 (sec)'
            }, 400
        else:
            rule['soft_time_limit'] = soft_time_limit

    if disk_usage:
        rule['disk_usage'] = disk_usage
    if enable_dedup is not None:
        rule['enable_dedup'] = enable_dedup

    # Hand the rule off to the generic job iterator running on the
    # on-demand Celery queue.
    payload = {
        'type': 'job_iterator',
        'function': 'hysds_commons.job_iterator.iterate',
        'args': ["figaro", rule],
    }

    on_demand_job_queue = celery_app.conf['ON_DEMAND_JOB_QUEUE']
    celery_task = do_submit_task(payload, on_demand_job_queue)

    return {
        'success': True,
        'message': 'task submitted successfully',
        'result': celery_task.id
    }