Example #1
    def delete(self, slug: str):
        admin_key = (request.get_json(silent=True) or {}).get("admin_key", "")
        if admin_key != current_app.config.get("ADMIN_KEY"):
            logger.warning("Invalid admin key")
            return json_response({"error": "Invalid admin key"},
                                 status=401)

        tag = db.session.query(Tag).filter(Tag.slug == slug).first()

        if tag is None:
            return json_response({"error": "Tag not found"}, status=404)

        logger.warning("Removing {}".format(tag))
        db.session.delete(tag)
        db.session.commit()

        rv = {
            "data": tag.to_dict(include_related_tags="full"),
            "theses": [thesis.to_dict() for thesis in tag.theses],
            "elections": {
                thesis.election_id: thesis.election.to_dict()
                for thesis in tag.theses
            },
        }

        return json_response(rv)
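
A note on usage: the handler above only reads an "admin_key" field from the JSON body. A minimal client-side sketch, assuming a hypothetical route /api/tags/<slug>; the URL and key below are placeholders, not taken from the source:

import requests

# Hypothetical URL and credentials, for illustration only.
resp = requests.delete(
    "http://localhost:5000/api/tags/some-tag-slug",
    json={"admin_key": "change-me"},  # compared against ADMIN_KEY in the app config
)
print(resp.status_code, resp.json())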
Example #2
def load_quiz_answers():
    """Load quiz answers from quiz_answers.json"""
    try:
        with open("../userdata/quiz_answers.json") as f:
            qa_export = json.load(f)
    except FileNotFoundError:
        logger.warning(
            "File ../userdata/quiz_answers.json not found - "
            "quiz answers were not imported"
        )
        return

    if qa_export["meta"]["api"] != API_FULL_NAME:
        raise ValueError(
            "Quiz answer export has version '{}' but should be '{}'".format(
                qa_export["meta"]["api"], API_FULL_NAME))

    logger.info("Adding {} quiz answers...".format(len(qa_export["data"])))

    for qa_data in qa_export["data"]:
        d = dateutil.parser.parse(qa_data["date"]).date()
        qa = QuizAnswer(
            thesis_id=qa_data["thesis"],
            uuid=qa_data["uuid"],
            date=d,
            answer=qa_data["answer"],
        )
        yield qa
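
The loader above implies the following shape for quiz_answers.json, reconstructed from the keys the code reads; every value below is a made-up placeholder:

# Illustrative only; field names mirror the accesses in load_quiz_answers().
example_quiz_answers = {
    "meta": {"api": "..."},  # must equal API_FULL_NAME
    "data": [
        {
            "thesis": "...",                # thesis id
            "uuid": "...",                  # client-generated identifier
            "date": "2019-05-26T12:00:00",  # parsed with dateutil, only the date part is kept
            "answer": 1,
        },
    ],
}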
Example #3
    def post(self, thesis_id: str):
        log_request_info("Thesis tags update", request)

        thesis = db.session.query(Thesis).get(thesis_id)
        data = request.get_json()
        error = None

        if thesis is None:
            return json_response({"error": "Thesis not found"}, status=404)

        if data is None or data.get("admin_key",
                                    "") != current_app.config.get("ADMIN_KEY"):
            logger.warning("Invalid admin key")
            error = "Invalid admin key"
        else:
            for tag_data in data.get("add", []):
                tag = (db.session.query(Tag).filter(
                    Tag.wikidata_id == tag_data["wikidata_id"]).first())
                if tag is None:
                    tag = db.session.query(Tag).filter_by(
                        title=tag_data["title"]).first()

                if tag is None:
                    tag = Tag(
                        description=tag_data.get("description", None),
                        title=tag_data["title"],
                        url=tag_data["url"],
                        wikidata_id=tag_data["wikidata_id"],
                        image=tag_data.get("image", None),
                    )

                    tag.make_slug()
                    logger.info("New tag {}".format(tag))

                tag.wikipedia_title = tag_data.get("wikipedia_title", None)
                tag.labels = ";".join(tag_data.get("labels", []))
                tag.aliases = ";".join(tag_data.get("aliases", []))

                logger.info("Appending {} to {}".format(tag, thesis))
                thesis.tags.append(tag)

            remove_titles = data.get("remove", [])
            if remove_titles:
                logger.info("Removing tags {}".format(", ".join(remove_titles)))
                thesis.tags = [
                    tag for tag in thesis.tags if tag.title not in remove_titles
                ]

            db.session.add(thesis)
            db.session.commit()

        if error is not None:
            return json_response({"error": error}, status=401)

        return json_response({"data": thesis.to_dict(), "error": None})
Example #4
def load_wahlergebnisse():
    """Load Wahlergebnisse from wahlergebnisse submodule."""

    try:
        with open("../wahlergebnisse/wahlergebnisse.json") as f:
            wahlergebnisse = json.load(f)
    except FileNotFoundError:
        logger.warning(
            "../wahlergebnisse/wahlergebnisse.json not found. Is "
            "the submodule initialised?"
        )
        raise SystemExit(1)

    return wahlergebnisse
Example #5
def load_data_file(fp, index=False):
    """Load JSON encoded data from disk with option to index."""
    rv = None
    try:
        with open(fp) as f:
            rv = json.load(f)
    except FileNotFoundError:
        logger.warning("File {} is missing".format(fp))
    else:
        if index:
            # Return a dict keyed by each entry's "id" instead of a list
            rv = {entry["id"]: entry for entry in rv}
    return rv
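
A short usage sketch for the helper above; the file path is a placeholder and the entries are assumed to carry an "id" field:

# Hypothetical call, for illustration only.
entries = load_data_file("../userdata/example.json", index=True)
if entries is not None:
    print(entries[1])  # the entry whose "id" is 1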
Example #6
async def update_registro(request):
    logger.info('solicitado endpoint de insertar un nuevo registro')
    params = await request.json()
    if 'id' not in params:
        logger.warning('no se ha pasado el parametro id')
        return web.Response(status=400, text='el parametro id es obligatorio')
    if auth.user == 'rasp':
        r = insert(params['id'])
    else:
        r = insert(params['id'], oficina=0)
    if r != 0:
        logger.warning(
            f'ha ocurrido un error insertando registro en la base de datos: {r}')
        return web.Response(
            status=500,
            text='error interno insertando registro en la base de datos')

    return web.Response(status=200, text='registro guardado correctamente')
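
For context, an aiohttp handler like the one above is registered roughly as follows; the route path and port are assumptions, not taken from the source:

from aiohttp import web

app = web.Application()
app.router.add_post('/registro', update_registro)
web.run_app(app, port=8080)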
Example #7
def load_tags():
    """Load tags from exported tags.json."""
    try:
        with open("../userdata/tags.json") as f:
            tag_export = json.load(f)
    except FileNotFoundError:
        logger.warning("File ../userdata/tags.json not found - tags were not imported")
        return

    if tag_export["meta"]["api"] != API_FULL_NAME:
        raise ValueError("Tag export has Version '{}' but should be '{}'".format(tag_export["meta"]["api"], API_FULL_NAME))
    logger.info("Adding {} tags...".format(len(tag_export["data"])))

    for tag_data in tag_export["data"]:
        tag = Tag(
            title=tag_data["title"],
            slug=tag_data["slug"],
            url=tag_data["url"],
            wikidata_id=tag_data["wikidata_id"],
        )

        tag.description = tag_data.get("description", None)
        tag.wikipedia_title = tag_data.get("wikipedia_title", None)
        tag.labels = ";".join(tag_data.get("labels", []))
        tag.aliases = ";".join(tag_data.get("aliases", []))
        tag.image = tag_data.get("image", None)

        if tag.description is not None and len(tag.description) > 1:
            # Always start with upper case
            tag.description = tag.description[0].upper() + tag.description[1:]

            # Remove non-descriptions
            if tag.description.startswith("Wikimedia-"):
                tag.description = None

        for thesis_id in tag_data["theses"]:
            thesis = Thesis.query.get(thesis_id)
            if thesis is None:
                logger.warning("Thesis {} not found for tag {}".format(thesis_id, tag))
                continue
            tag.theses.append(thesis)

        yield tag
Example #8
def load_results():
    """Match election records to the existing election datasets."""
    logger.info("Matching election results...")

    with open("../wahlergebnisse/wahlergebnisse.json") as f:
        result_data = json.load(f)
    with open("../userdata/substitutions.json") as f:
        substitutions = defaultdict(list)
        substitutions.update(json.load(f))

    for occ in db.session.query(Election).all():
        dt = occ.date.date()
        occ_results = [
            o
            for o in result_data
            if o["territory"].lower().startswith(occ.territory.lower()[:2])
            and dateutil.parser.parse(o["date"]).date() == dt
        ]

        matched_results = set()

        if not occ_results:
            logger.error("Didn't find results for {}. Removing from db...".format(occ))
            for th in occ.theses:
                for pos in th.positions:
                    db.session.delete(pos)
                db.session.delete(th)
            db.session.delete(occ)
        else:
            res = occ_results[0]

            if "preliminary" in res and res["preliminary"] == True:
                logger.warning("Marking {} as preliminary".format(occ))
                occ.preliminary = True
                yield occ

            parties = {p.party for p in occ.theses[0].positions}
            for p in parties:
                options = [p.name.lower()] + list(map(str.lower, substitutions[p.name]))
                matches = [
                    (name, result)
                    for name, result in res["results"].items()
                    if name.lower() in options
                ]

                if len(matches) > 0:
                    for match in matches:
                        if match[0].lower() != p.name.lower():
                            logger.warning(
                                "Assigned WOM text from {} to election result of {} in {}".format(
                                    p, match[0], res["title"]
                                )
                            )
                        matched_results.add(match[0])
                        votes = match[1].get("votes")
                        yield Result(
                            election=occ,
                            party=p,
                            party_repr=match[0],
                            votes=votes,
                            pct=match[1]["pct"],
                            source_url=res["url"],
                            source_name="Tagesschau Wahlarchiv"
                        )
                else:
                    if occ.preliminary:
                        logger.info("{} missing vote count for  {}".format(occ, p))
                    else:
                        logger.error("No vote count for {} in {}".format(p, occ))

            # Add results missing in Wahl-o-Mat
            for p_name, match in res["results"].items():
                if p_name in matched_results:
                    continue

                # Try and assign a unified party instance to this election
                # result to merge parties that have changed their name over
                # time

                party = None
                if p_name in party_instances:
                    party = party_instances[p_name]
                else:
                    for name, subs in substitutions.items():
                        if p_name in subs:
                            if name in party_instances:
                                party = party_instances[name]
                                logger.info(
                                    "Linked statement {} to election result of '{}' in {}".format(
                                        party, p_name, res["title"]
                                    )
                                )
                            break

                if party is None:
                    party = Party(name=p_name)
                    party_instances[p_name] = party

                yield Result(
                    election=occ,
                    party_repr=p_name,
                    party=party,
                    votes=match["votes"] if "votes" in match else None,
                    pct=match["pct"],
                    source_url=res["url"],
                    source_name="Tagesschau Wahlarchiv",
                    wom=False,
                )
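
The matching logic above implies roughly the following structure for the two input files, reconstructed from the dictionary accesses in the code; all values are placeholders:

# Illustrative shapes only, inferred from load_results() above.
example_result_entry = {  # one item in wahlergebnisse.json
    "title": "Landtagswahl Beispielland 2018",
    "territory": "Beispielland",
    "date": "2018-10-14",
    "url": "https://example.org/wahlergebnis",
    "preliminary": False,
    "results": {
        "Beispielpartei": {"votes": 123456, "pct": 12.3},
    },
}

example_substitutions = {  # userdata/substitutions.json
    # canonical party name -> alternative spellings found in the results
    "Beispielpartei": ["BP", "Die Beispielpartei"],
}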
Example #9
                    source_name="Tagesschau Wahlarchiv",
                    wom=False,
                )


if __name__ == "__main__":
    app = create_app()
    with app.app_context():
        try:
            for obj in load_elections():
                db.session.add(obj)
                logger.info("Added {}".format(obj))

            for result in load_results():
                db.session.add(result)

            for tag in load_tags():
                db.session.add(tag)

            for quiz_answer in load_quiz_answers():
                db.session.add(quiz_answer)

            logger.info("Committing session to disk...")
            db.session.commit()
        except BaseException:
            db.session.rollback()
            raise
        finally:
            logger.info("Done")
            logger.warning("Clear and refill caches!")
Example #10
import sys

from sqlalchemy.schema import DropTable
from sqlalchemy.ext.compiler import compiles

sys.path.append("./app/")

from main import create_app
from services import db
from services.logger import logger

# Add `CASCADE` to drop table statement
# https://stackoverflow.com/a/38679457
@compiles(DropTable, "postgresql")
def _compile_drop_table(element, compiler, **kwargs):
    return compiler.visit_drop_table(element) + " CASCADE"

if __name__ == '__main__':
    app = create_app()
    arg_force = "--force" in sys.argv

    logger.warning("All userdata backed up?")

    if arg_force or input("Reset database? [y/N]") == "y":
        with app.app_context():
            logger.info("Drop and recreate...")
            db.drop_all(app=app)
            db.create_all(app=app)
    else:
        logger.info("Nothing was changed")
    logger.info("OK")
Example #11
def call_api():
    hubspot = create_client()
    try:
        hubspot.crm.contacts.basic_api.get_page()
        logger.info("Requesting get_page: success")
    except ApiException as e:
        logger.error("Exception occurred, status code: {}".format(e.status))


if not is_authorized():
    print(
        "In order to continue, please go to http://localhost:5000 and authorize via OAuth."
    )
    print("Then return here.")

    while not is_authorized():
        time.sleep(3)

try:
    while True:
        try:
            call_api()
        except RateLimited:
            logger.warning("Rate limit reached, sleeping...")
            time.sleep(0.5 + random.random())
except KeyboardInterrupt:
    sys.exit(0)
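
The fixed half-second pause above works, but a common alternative is exponential backoff with jitter. A sketch under the same assumptions (call_api, RateLimited and logger as used above):

import random
import time

def call_api_with_backoff(max_retries=5):
    """Retry call_api with exponentially growing, jittered pauses."""
    delay = 0.5
    for _ in range(max_retries):
        try:
            call_api()
            return
        except RateLimited:
            logger.warning("Rate limit reached, backing off {:.1f}s".format(delay))
            time.sleep(delay + random.random())
            delay *= 2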