def get(self, args):
    """Return the weather forecast for the requested city from the offsite API.

    Authorizes the caller, queries the forecast provider, optionally runs the
    result through TTS, and returns a JSON-serializable dict.

    Raises:
        FailedDependency: the offsite API failed, returned a malformed payload,
            or returned a non-200 status code.
        InternalServerError: the TTS step failed.
    """
    authorize(args['uuid'], args['api_key'])
    # TODO: add caching
    forecast_type = args['forecast_type']
    city = "%s,%s" % (args['city'], args['country_code'])
    headers = config.forecasts['headers']
    try:
        weather = requests.get(
            url=urls[forecast_type],
            headers=headers,
            params={'q': city, 'units': args['units']}
        ).json()
        # Guard BEFORE touching keys: a null JSON body previously raised an
        # uncaught TypeError when iterating weather['cod'].
        if weather is None:
            raise FailedDependency(
                "failed to retrieve forecast from offsite api")
        # .get() avoids an uncaught KeyError on a malformed payload; a missing
        # or empty code is non-numeric and rejected below (int('') would raise).
        cod = str(weather.get('cod', ''))
        if not cod.isdigit():
            raise FailedDependency(
                "invalid status code retrieved from offsite api")
        code = int(cod)
        if code != 200:
            message = "failed to retrieve forecast from offsite api"
            message += " - code:%d - %s" % (code, str(weather.get('message')))
            raise FailedDependency(message)
    except FailedDependency:
        raise
    except requests.exceptions.RequestException:
        # Network / HTTP-level failure talking to the provider.
        raise FailedDependency("failed to retrieve forecast from offsite api")
    if 'tts' in args:
        # TODO: implement per-forecast-type phrasing
        try:
            text = tts.tts(
                {"forecast": weather},
                datetime.today() + timedelta(days=1)
            )
            return {"tts": text, "forecast": weather}
        # TODO: implement more specific error handling
        except Exception as e:
            print(e)
            raise InternalServerError()
    return {"forecast": weather}
def get(self): """Return text to use for training Returns a random text from the training corpus that doesn't have a training by the logged user""" try: user = User.objects.get(email=get_jwt_identity()) texts = list( Text.objects.aggregate(*[ { '$lookup': { 'from': Training._get_collection_name(), 'localField': '_id', 'foreignField': 'text_id', 'as': 'trainings' } }, { '$project': { '_id': 1, 'value': 1, 'trainings': { '$filter': { 'input': '$trainings', 'cond': { 'user_id': { '$eq': [ 'user_id', user.pk ] } } } } } }, { '$match': { 'trainings.0': { '$exists': False } } }, { '$sample': { 'size': 1 } } ]) ) if not texts: return '', 204 text = texts[0] snapshot = Snapshot.current() spacy_document = nlp_task.apply_async( [text['value']], queue=str(snapshot)).wait() return { 'text_id': str(text['_id']), 'snapshot': snapshot, 'spacy_document': spacy_document } except TimeoutError: raise FailedDependency()
def main():
    """Annotate the posted text and return the formatted result.

    Raises:
        BadRequest: metadata is missing, not valid JSON, not an object, or
            lacks the required 'id' key.
        FailedDependency: the annotation/formatting step failed.
    """
    text = request.form.get('text')
    # Heuristic: if html.parser finds any tag, treat the payload as HTML.
    is_html = bool(BeautifulSoup(text, "html.parser").find())
    metadata = request.form.get('metadata')
    try:
        metadata = json.loads(metadata)
        metadata['id']  # required key
    # TypeError covers a missing form field (json.loads(None)) and a
    # non-dict JSON payload (e.g. a bare number) being indexed; previously
    # it escaped as an unhandled 500 instead of a 400.
    except (json.JSONDecodeError, KeyError, TypeError):
        raise BadRequest()
    try:
        out = format_output(annotate(text, is_html), metadata,
                            StringIO()).getvalue()
    except Exception:
        # Annotation is a best-effort offsite/derived dependency.
        raise FailedDependency()
    return out
def get(self, args):
    """Return NYT top stories for the requested section.

    Raises:
        FailedDependency: the offsite API answered with a bad status.
        InternalServerError: the request to the offsite API failed.
    """
    authorize(args['uuid'], args['api_key'])
    news_type = args['news_type']
    url = f"https://api.nytimes.com/svc/topstories/v2/{news_type}.json"
    try:
        params = {"api-key": config.news['api_key']}
        response = requests.get(url=url, params=params).json()
        ok = (response is not None
              and 'status' in response
              and response['status'] == "OK")
        if not ok:
            raise FailedDependency(
                "failed to retrieve news from offsite api")
        return jsonify({"news": response['results']})
    except FailedDependency:
        raise
    except requests.exceptions.RequestException:
        raise InternalServerError()
class TrainResource(MethodView):
    """Returns a random text"""

    @jwt_and_role_required(Role.TRAINER)
    @blp.doc(operationId="trainNer", responses={
        '204': {'description': "No documents left to train"}
    })
    @blp.response(TrainTextSchema, code=200, description="Training entity")
    @response_error(NotFound("Corpus not found"))
    @response_error(FailedDependency("Failed to infer entities"))
    def get(self):
        """Return text to use for training

        Returns a random text from the training corpus that doesn't have a
        training by the logged user.

        Raises:
            FailedDependency: the NLP worker did not answer in time.
        """
        try:
            user = User.objects.get(email=get_jwt_identity())
            texts = list(
                Text.objects.aggregate(*[
                    # Join each text with its trainings.
                    {
                        '$lookup': {
                            'from': Training._get_collection_name(),
                            'localField': '_id',
                            'foreignField': 'text_id',
                            'as': 'trainings'
                        }
                    },
                    # Keep only the trainings authored by the logged user.
                    # NOTE: $filter's cond must be an aggregation expression
                    # over '$$this'; the previous form compared the literal
                    # string 'user_id' against user.pk and never filtered.
                    {
                        '$project': {
                            '_id': 1,
                            'value': 1,
                            'trainings': {
                                '$filter': {
                                    'input': '$trainings',
                                    'cond': {
                                        '$eq': ['$$this.user_id', user.pk]
                                    }
                                }
                            }
                        }
                    },
                    # Keep texts the user has NOT trained yet.
                    {
                        '$match': {
                            'trainings.0': {'$exists': False}
                        }
                    },
                    # Pick one at random.
                    {'$sample': {'size': 1}}
                ])
            )
            if not texts:
                # Nothing left to train.
                return '', 204
            text = texts[0]
            snapshot = Snapshot.current()
            # Run NER inference on the worker pinned to the current snapshot.
            spacy_document = nlp_task.apply_async(
                [text['value']], queue=str(snapshot)).wait()
            return {
                'text_id': str(text['_id']),
                'snapshot': snapshot,
                'spacy_document': spacy_document
            }
        except TimeoutError:
            raise FailedDependency()
def handle_exception(e):
    """Map an unreachable-LDAP error to an HTTP 424 for the OPDS feed login."""
    message = 'LDAP server not accessible while trying to login to opds feed'
    log.debug(message)
    return error_http(FailedDependency())
def post():
    """Handle a homework submission (or justified absence) form.

    Validates the form, builds the submission ZIP and email, enqueues the
    grading task and (outside tests) mails the corrector.

    Raises:
        InvalidForm: any user-correctable form problem.
        FailedDependency: no corrector is assigned for the submission.
    """
    # Read form values.
    try:
        validate_captcha()
        tp = request.form["tp"]
        files = get_files()
        body = request.form["body"] or ""
        tipo = request.form["tipo"]
        identificador = request.form["identificador"]
    except KeyError as ex:
        raise InvalidForm(
            f"Formulario inválido sin campo {ex.args[0]!r}") from ex

    # Resolve the students making the submission.
    planilla = fetch_planilla()
    try:
        alulist = planilla.get_alulist(identificador)
    except KeyError as ex:
        raise InvalidForm(
            f"No se encuentra grupo o legajo {identificador!r}") from ex

    # Validate several aspects of the submission.
    if tp not in cfg.entregas:
        raise InvalidForm(f"La entrega {tp!r} es inválida")
    elif len(alulist) > 1 and cfg.entregas[tp] != Modalidad.GRUPAL:
        # FIX: this validation used to raise ValueError (an unhandled 500);
        # every sibling form check raises InvalidForm, so be consistent.
        raise InvalidForm(f"La entrega {tp} debe ser individual")
    elif tipo == "entrega" and not files:
        raise InvalidForm(
            "No se ha adjuntado ningún archivo con extensión válida.")
    elif tipo == "ausencia" and not body:
        raise InvalidForm(
            "No se ha adjuntado una justificación para la ausencia.")

    # Find the corresponding teaching assistant.
    if cfg.entregas[tp] == Modalidad.INDIVIDUAL:
        docente = alulist[0].ayudante_indiv
    elif cfg.entregas[tp] == Modalidad.GRUPAL:
        docente = alulist[0].ayudante_grupal
    else:
        docente = None

    # Parcialitos are the only submissions allowed to have no corrector.
    if not docente and cfg.entregas[tp] != Modalidad.PARCIALITO:
        legajos = ", ".join(x.legajo for x in alulist)
        raise FailedDependency(
            f"No hay corrector para la entrega {tp} de {legajos}")

    email = make_email(tp.upper(), alulist, docente, body)
    legajos = utils.sorted_strnum([x.legajo for x in alulist])

    if tipo == "ausencia":
        # Absences ship a minimal ZIP with just the justification text.
        rawzip = io.BytesIO()
        email.replace_header("Subject", email["Subject"] + " (ausencia)")
        with zipfile.ZipFile(rawzip, "w") as zf:
            zf.writestr("ausencia.txt", body + "\n")
        entrega = File(rawzip.getvalue(), f"{tp}_ausencia.zip")
    else:
        entrega = zipfile_for_entrega(files)

    # Attach the single ZIP file to the email.
    part = MIMEBase("application", "zip")
    part.set_payload(entrega.content)
    encoders.encode_base64(part)
    part.add_header("Content-Disposition", "attachment",
                    filename=entrega.filename)
    email.attach(part)

    # Determine the path inside algo2_entregas (parcialitos are special-cased).
    tp_id = tp.lower()
    if cfg.entregas[tp] != Modalidad.PARCIALITO:
        # Traditional path: pila/2020_1/54321
        relpath_base = pathlib.PurePath(tp_id) / cfg.cuatri
    else:
        # Parcialito-specific path: parcialitos/2020_1/parcialito1_r2/54321
        relpath_base = pathlib.PurePath("parcialitos") / cfg.cuatri / tp_id

    task = CorrectorTask(
        tp_id=tp_id,
        legajos=legajos,
        zipfile=entrega.content,
        orig_headers=dict(email.items()),
        repo_relpath=relpath_base / "_".join(legajos),
    )
    task_queue.enqueue(corregir_entrega, task)

    if not cfg.test:
        # TODO: instead of sending an email (slow), commit to the local
        # checkout of algo2_entregas.
        utils.sendmail(email, oauth_credentials())

    return render_template(
        "result.html",
        tp=tp,
        email="\n".join(f"{k}: {v}" for k, v in email.items())
        if cfg.test else None,
    )