def get(self): """Return base data set required by the web client.""" if not is_cache_filler(): logger.info("Cache miss for {}".format(request.path)) rv = {"data": dict()} # Elections try: elections = db.session.query(Election).all() except SQLAlchemyError as e: logger.error(e) return json_response({"error": "Server Error"}) rv["data"]["elections"] = defaultdict(list) for election in elections: rv["data"]["elections"][election.territory].append( election.to_dict(thesis_data=False)) # Tags tagItems = (db.session.query(Tag, func.count(Thesis.id)).join( Tag.theses).group_by(Tag.title).all()) rv["data"]["tags"] = [ item[0].to_dict( thesis_count=item[1], query_root_status=True, include_related_tags="simple", ) for item in tagItems ] return json_response(rv)
def close_connection(self):
    logger.info('close connection to the database requested')
    try:
        self.connection.close()
    except Exception as e:
        logger.error(f'error closing connection to the database: {e}')
        # return the error message as a string so the dict stays serializable
        return {"code": 1, "error": str(e)}
    return {"code": 0, "error": ''}
def get(self, filename=None):
    """List all tags."""
    if not is_cache_filler():
        logger.info("Cache miss for {}".format(request.path))

    if request.args.get("include_theses_ids", False) or filename is not None:
        results = (db.session.query(Tag)
                   .join(Tag.theses)
                   .group_by(Tag.title)
                   .order_by(Tag.title)
                   .all())
        rv = {
            "data": [tag.to_dict(include_theses_ids=True) for tag in results]
        }
    else:
        results = (db.session.query(Tag, func.count(Thesis.id))
                   .join(Tag.theses)
                   .group_by(Tag.title)
                   .all())
        rv = {
            "data": [item[0].to_dict(thesis_count=item[1]) for item in results]
        }

    return json_response(rv, filename=filename)
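# Hypothetical json_response helper, consistent with how it is called in the
# resources above (dict payload, optional HTTP status, optional filename used
# as a download hint). This is an assumption, not the project's real helper,
# which may add caching headers or other behavior.
from flask import Response, json


def json_response(data, status=200, filename=None):
    headers = {}
    if filename is not None:
        # expose the payload as a downloadable file when a filename is given
        headers["Content-Disposition"] = "attachment; filename={}".format(filename)
    return Response(
        json.dumps(data),
        status=status,
        mimetype="application/json",
        headers=headers,
    )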
def call_api():
    hubspot = create_client()
    try:
        hubspot.crm.contacts.basic_api.get_page()
        logger.info("Requesting get_page: success")
    except ApiException as e:
        logger.error("Exception occurred, status code: {}".format(e.status))
def load_quiz_answers():
    """Load quiz answers from quiz_answers.json."""
    try:
        with open("../userdata/quiz_answers.json") as f:
            qa_export = json.load(f)
    except FileNotFoundError:
        logger.warning(
            "File ../userdata/quiz_answers.json not found - "
            "quiz answers were not imported")
        return

    assert qa_export["meta"]["api"] == API_FULL_NAME

    logger.info("Adding {} quiz answers...".format(len(qa_export["data"])))

    for qa_data in qa_export["data"]:
        d = dateutil.parser.parse(qa_data["date"]).date()
        qa = QuizAnswer(
            thesis_id=qa_data["thesis"],
            uuid=qa_data["uuid"],
            date=d,
            answer=qa_data["answer"],
        )
        yield qa
def call_api():
    # Note that create_client() returns a client configured with
    # retry middleware.
    hubspot = create_client()
    try:
        page = hubspot.crm.contacts.basic_api.get_page()
        if os.getppid() == 0:
            logger.info("Requesting get_page: success")
    except ApiException as e:
        logger.error("Exception occurred, status code: {}".format(e.status))
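# Sketch of what create_client() could look like. This is an assumption, not
# the project's actual helper: it follows the hubspot-api-client README
# pattern of passing a urllib3 Retry so failed requests are retried
# automatically. The access-token env var name is hypothetical.
import os

from hubspot import HubSpot
from urllib3.util.retry import Retry


def create_client():
    retry = Retry(
        total=3,                           # retry up to three times
        backoff_factor=0.3,                # exponential backoff between attempts
        status_forcelist=(500, 502, 504),  # retry only on these status codes
    )
    return HubSpot(access_token=os.getenv("HUBSPOT_ACCESS_TOKEN"), retry=retry)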
def get(self, thesis_id: str):
    """Return metadata for a specific thesis."""
    if not is_cache_filler():
        logger.info("Cache miss for {}".format(request.path))

    thesis = Thesis.query.get(thesis_id)

    if thesis is None:
        return json_response({"error": "Thesis not found"}, status=404)

    rv = {"data": thesis.to_dict(), "related": thesis.related()}

    return json_response(rv)
def get(self, wom_id: int):
    """Election data and a list of its theses."""
    if not is_cache_filler():
        logger.info("Cache miss for {}".format(request.path))

    election = Election.query.get(wom_id)

    if election is None:
        return json_response({"error": "Election not found"}, status=404)

    rv = {
        "data": election.to_dict(),
        "theses": [thesis.to_dict() for thesis in election.theses],
    }

    return json_response(rv)
async def update_registro(request):
    logger.info('endpoint to insert a new record requested')
    params = await request.json()

    if 'id' not in params:
        logger.warning('the id parameter was not provided')
        return web.Response(status=400, text='the id parameter is required')

    if auth.user == 'rasp':
        r = insert(params['id'])
    else:
        r = insert(params['id'], oficina=0)

    if r != 0:
        logger.warning(
            f'an error occurred inserting the record into the database: {r}')
        return web.Response(
            status=500,
            text='internal error inserting the record into the database')

    return web.Response(status=200, text='record saved successfully')
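# Minimal sketch of how this aiohttp handler could be wired up. The route
# path '/registro' and the port are assumptions, not taken from the project;
# auth and insert() are expected to come from the surrounding module.
from aiohttp import web

app = web.Application()
app.add_routes([web.post('/registro', update_registro)])

if __name__ == '__main__':
    web.run_app(app, port=8080)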
def get(self, slug: str):
    """Tag metadata, list of all related theses and their elections."""
    if not is_cache_filler():
        logger.info("Cache miss for {}".format(request.path))

    tag = db.session.query(Tag).filter(Tag.slug == slug.lower()).first()

    if tag is None:
        return json_response({"error": "Tag not found"}, status=404)

    rv = {
        "data": tag.to_dict(include_related_tags="full"),
        "theses": [thesis.to_dict() for thesis in tag.theses],
        "elections": {
            thesis.election_id: thesis.election.to_dict()
            for thesis in tag.theses
        },
    }

    return json_response(rv)
def get(self): """A list of all elections.""" if not is_cache_filler(): logger.info("Cache miss for {}".format(request.path)) try: elections = Election.query.all() except SQLAlchemyError as e: logger.error(e) return json_response({"error": "Server Error"}) thesis_data = request.args.get("thesis_data", False) rv = {"data": defaultdict(list)} for election in elections: rv["data"][election.territory].append( election.to_dict(thesis_data=thesis_data)) return json_response(rv)
def insert_registro(self, id, timestamp, oficina):
    logger.info('adding a record to the database requested', color="amarillo")

    cmd = f"""
        INSERT INTO registros (timestamp, id, oficina)
        VALUES ("{timestamp}", "{id}", "{oficina}");
        """
    try:
        self.cursor.execute(cmd)
        result = self.cursor.fetchall()
    except Exception as e:
        logger.error(f'error executing command {cmd} on the database: {e}')
        return {
            "code": 1,
            "error": f'error executing command {cmd} on the database: {e}'
        }

    logger.info('record added to the database successfully', color='azul')
    return {"code": 0, "error": ''}
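# Sketch of a parameterized variant of insert_registro. This is an
# assumption, not the project's code: it presumes a DB-API cursor (e.g.
# pymysql or mysql-connector) that accepts %s placeholders, which avoids
# building the SQL by interpolating the values into the string.
def insert_registro_parameterized(self, id, timestamp, oficina):
    cmd = "INSERT INTO registros (timestamp, id, oficina) VALUES (%s, %s, %s);"
    try:
        # the driver escapes the values, so quotes in the inputs cannot
        # break the statement
        self.cursor.execute(cmd, (timestamp, id, oficina))
    except Exception as e:
        return {"code": 1, "error": str(e)}
    return {"code": 0, "error": ''}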
def load_tags():
    """Load tags from exported tags.json."""
    try:
        with open("../userdata/tags.json") as f:
            tag_export = json.load(f)
    except FileNotFoundError:
        logger.warning("File ../userdata/tags.json not found - tags were not imported")
        return

    if tag_export["meta"]["api"] != API_FULL_NAME:
        raise ValueError("Tag export has Version '{}' but should be '{}'".format(
            tag_export["meta"]["api"], API_FULL_NAME))

    logger.info("Adding {} tags...".format(len(tag_export["data"])))

    for tag_data in tag_export["data"]:
        tag = Tag(
            title=tag_data["title"],
            slug=tag_data["slug"],
            url=tag_data["url"],
            wikidata_id=tag_data["wikidata_id"],
        )

        tag.description = tag_data.get("description", None)
        tag.wikipedia_title = tag_data.get("wikipedia_title", None)
        tag.labels = ";".join(tag_data.get("labels", []))
        tag.aliases = ";".join(tag_data.get("aliases", []))
        tag.image = tag_data.get("image", None)

        if tag.description is not None and len(tag.description) > 1:
            # Always start with upper case
            tag.description = tag.description[0].upper() + tag.description[1:]

            # Remove non-descriptions
            if tag.description.startswith("Wikimedia-"):
                tag.description = None

        for thesis_id in tag_data["theses"]:
            tag.theses.append(Thesis.query.get(thesis_id))

        yield tag
def post(self, thesis_id: str):
    log_request_info("Thesis tags update", request)

    thesis = db.session.query(Thesis).get(thesis_id)
    data = request.get_json()
    error = None

    if thesis is None:
        return json_response({"error": "Thesis not found"}, status=404)

    if data is None or data.get("admin_key", "") != current_app.config.get("ADMIN_KEY"):
        logger.warning("Invalid admin key")
        error = "Invalid admin key"
    else:
        for tag_data in data.get("add", []):
            tag = (db.session.query(Tag)
                   .filter(Tag.wikidata_id == tag_data["wikidata_id"])
                   .first())
            if tag is None:
                tag = db.session.query(Tag).filter_by(
                    title=tag_data["title"]).first()
            if tag is None:
                tag = Tag(
                    description=tag_data.get("description", None),
                    title=tag_data["title"],
                    url=tag_data["url"],
                    wikidata_id=tag_data["wikidata_id"],
                    image=tag_data.get("image", None),
                )
                tag.make_slug()
                logger.info("New tag {}".format(tag))

            tag.wikipedia_title = tag_data.get("wikipedia_title", None)
            tag.labels = ";".join(tag_data.get("labels", []))
            tag.aliases = ";".join(tag_data.get("aliases", []))

            logger.info("Appending {} to {}".format(tag, thesis))
            thesis.tags.append(tag)

        if len(data.get("remove", [])) > 0:
            logger.info("Removing tags {}".format(", ".join(data.get("remove"))))
            thesis.tags = [
                tag for tag in thesis.tags
                if tag.title not in data.get("remove")
            ]

        db.session.add(thesis)
        db.session.commit()

    if error is not None:
        return json_response({"error": error})
    else:
        return json_response({"data": thesis.to_dict(), "error": error})
import datetime

from services.kafka import get_consumer
from services.logger import logger
from services.db import Event, session

consumer = get_consumer()

for message in consumer:
    message = message.value
    logger.info(message)

    event = Event()
    event.event_type = message["subscriptionType"]
    event.event_id = message["eventId"]
    event.object_id = message["objectId"]
    event.occurred_at = datetime.datetime.fromtimestamp(
        message["occurredAt"] // 1000)

    if "propertyName" in message:
        event.property_name = message["propertyName"]
    if "propertyValue" in message:
        event.property_value = message["propertyValue"]

    session.add(event)
    session.commit()
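# Sketch of what services.kafka.get_consumer could look like. This is an
# assumption, not the project's code: it presumes kafka-python and
# JSON-encoded message values; the topic and broker env var names are
# hypothetical.
import json
import os

from kafka import KafkaConsumer


def get_consumer():
    return KafkaConsumer(
        os.getenv("KAFKA_TOPIC", "hubspot-events"),
        bootstrap_servers=os.getenv("KAFKA_BOOTSTRAP_SERVERS", "localhost:9092"),
        # decode each message value to a dict so message.value is subscriptable
        value_deserializer=lambda m: json.loads(m.decode("utf-8")),
    )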
import sys

from sqlalchemy.schema import DropTable
from sqlalchemy.ext.compiler import compiles

sys.path.append("./app/")

from main import create_app
from services import db
from services.logger import logger


# Add `CASCADE` to drop table statement
# https://stackoverflow.com/a/38679457
@compiles(DropTable, "postgresql")
def _compile_drop_table(element, compiler, **kwargs):
    return compiler.visit_drop_table(element) + " CASCADE"


if __name__ == '__main__':
    app = create_app()
    arg_force = "--force" in sys.argv

    logger.warning("All userdata backed up?")
    if arg_force or input("Reset database? [y/N]") == "y":
        with app.app_context():
            logger.info("Drop and recreate...")
            db.drop_all(app=app)
            db.create_all(app=app)
    else:
        logger.info("Nothing was changed")

    logger.info("OK")
party=party, votes=match["votes"] if "votes" in match else None, pct=match["pct"], source_url=res["url"], source_name="Tagesschau Wahlarchiv", wom=False, ) if __name__ == "__main__": app = create_app() with app.app_context(): try: for obj in load_elections(): db.session.add(obj) logger.info("Added {}".format(obj)) for result in load_results(): db.session.add(result) for tag in load_tags(): db.session.add(tag) for quiz_answer in load_quiz_answers(): db.session.add(quiz_answer) logger.info("Committing session to disk...") db.session.commit() except: db.session.rollback() raise
def registros(self, client):
    logger.info('records requested from the database', color="amarillo")

    if client == 'julio' or client == 'jesus':
        logger.info('requesting the ids of all users from the database',
                    color="blanco")
        cmd_0 = 'SELECT * FROM RFID.nombres;'
        try:
            self.cursor.execute(cmd_0)
            ids = self.cursor.fetchall()
        except Exception as e:
            logger.error(f'error executing command {cmd_0} on the database: {e}')
            return {
                "code": 1,
                "error": f'error executing command {cmd_0} on the database: {e}',
                "result": ''
            }

        res = []
        for element in ids:
            cmd = f'SELECT * FROM RFID.registros WHERE id ="{element["id"]}";'
            try:
                self.cursor.execute(cmd)
                result = self.cursor.fetchall()
            except Exception as e:
                logger.error(f'error executing command {cmd} on the database: {e}')
                return {
                    "code": 1,
                    "error": f'error executing command {cmd} on the database: {e}',
                    "result": ''
                }
            res.append({element['nombre']: result})

        logger.info('records retrieved successfully', color='azul')
        return {"code": 0, "error": '', "result": res}
    else:
        logger.info('requesting the user id from the database', color="blanco")
        cmd_0 = f'SELECT * FROM RFID.nombres WHERE nombre ="{client}";'
        try:
            self.cursor.execute(cmd_0)
            id = self.cursor.fetchall()[0]['id']
        except Exception as e:
            logger.error(f'error executing command {cmd_0} on the database: {e}')
            return {
                "code": 1,
                "error": f'error executing command {cmd_0} on the database: {e}',
                "result": ''
            }

        cmd = f'SELECT * FROM RFID.registros WHERE id ="{id}";'
        try:
            self.cursor.execute(cmd)
            result = self.cursor.fetchall()
        except Exception as e:
            logger.error(f'error executing command {cmd} on the database: {e}')
            return {
                "code": 1,
                "error": f'error executing command {cmd} on the database: {e}',
                "result": ''
            }

        logger.info('records retrieved successfully', color='azul')
        return {"code": 0, "error": '', "result": result}
def load_results():
    """Match election records to the existing election datasets."""
    logger.info("Matching election results...")

    with open("../wahlergebnisse/wahlergebnisse.json") as f:
        result_data = json.load(f)
    with open("../userdata/substitutions.json") as f:
        substitutions = defaultdict(list)
        substitutions.update(json.load(f))

    for occ in db.session.query(Election).all():
        dt = occ.date.date()
        occ_results = [
            o for o in result_data
            if o["territory"].lower().startswith(occ.territory.lower()[:2])
            and dateutil.parser.parse(o["date"]).date() == dt
        ]
        matched_results = set()

        if len(occ_results) == 0:
            logger.error("Didn't find results for {}. Removing from db..".format(occ))
            for th in occ.theses:
                for pos in th.positions:
                    db.session.delete(pos)
                db.session.delete(th)
            db.session.delete(occ)
        else:
            res = occ_results[0]

            if res.get("preliminary", False):
                logger.warning("Marking {} as preliminary".format(occ))
                occ.preliminary = True
                yield occ

            parties = set([p.party for p in occ.theses[0].positions])
            for p in parties:
                options = [p.name.lower()] + list(map(str.lower, substitutions[p.name]))
                matches = [
                    (name, result)
                    for name, result in res["results"].items()
                    if name.lower() in options
                ]

                if len(matches) > 0:
                    for match in matches:
                        if match[0].lower() != p.name.lower():
                            logger.warning(
                                "Assigned WOM text from {} to election result of {} in {}".format(
                                    p, match[0], res["title"]))
                        matched_results.add(match[0])
                        votes = match[1]["votes"] if "votes" in match[1] else None
                        yield Result(
                            election=occ,
                            party=p,
                            party_repr=match[0],
                            votes=votes,
                            pct=match[1]["pct"],
                            source_url=res["url"],
                            source_name="Tagesschau Wahlarchiv"
                        )
                else:
                    if occ.preliminary:
                        logger.info("{} missing vote count for {}".format(occ, p))
                    else:
                        logger.error("No vote count for {} in {}".format(p, occ))

            # Add results missing in Wahl-o-Mat
            for p_name, match in res["results"].items():
                if p_name in list(matched_results):
                    continue

                # Try and assign a unified party instance to this election
                # result to merge parties that have changed their name over
                # time
                party = None
                if p_name in party_instances.keys():
                    party = party_instances[p_name]
                else:
                    for (name, subs) in substitutions.items():
                        if p_name in subs:
                            if name in party_instances.keys():
                                party = party_instances[name]
                                logger.info(
                                    "Linked statement {} to election result of '{}' in {}".format(
                                        party, p_name, res["title"]))
                            break

                if party is None:
                    party = Party(name=p_name)
                    party_instances[p_name] = party

                yield Result(
                    election=occ,
                    party_repr=p_name,
                    party=party,
                    votes=match["votes"] if "votes" in match else None,
                    pct=match["pct"],
                    source_url=res["url"],
                    source_name="Tagesschau Wahlarchiv",
                    wom=False,
                )
import os

from services.logger import logger
from helpers.search import search_next_contacts_batch


def iterate_via_search_results(search_query, process_contact_func):
    after = 0
    while True:
        contacts = search_next_contacts_batch(
            search_query=search_query,
            after=after,
            limit=os.getenv("SEARCH_BATCH_SIZE"))
        if len(contacts) == 0:
            break
        for contact in contacts:
            process_contact_func(contact)
        after = contacts[-1].id


if __name__ == "__main__":
    iterate_via_search_results(
        search_query=os.getenv("SEARCH_QUERY"),
        process_contact_func=lambda contact: logger.info(
            "Processing contact_id={}".format(contact.id)),
    )
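# Sketch of what helpers.search.search_next_contacts_batch might do. This is
# an assumption, not the project's helper: it presumes hubspot-api-client's
# CRM search endpoint and reuses create_client() from above, passing the
# query through as a free-text search.
from hubspot.crm.contacts import PublicObjectSearchRequest


def search_next_contacts_batch(search_query, after, limit):
    hubspot = create_client()
    search_request = PublicObjectSearchRequest(
        query=search_query,  # free-text query over contact properties
        after=after,         # paging cursor from the previous call
        limit=limit,         # page size
    )
    response = hubspot.crm.contacts.search_api.do_search(
        public_object_search_request=search_request)
    return response.results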