Example #1
0
def api_sensor():
    """CRUD endpoint for sensor records, dispatched on the HTTP method.

    GET    -> JSON array of all sensors ordered by id.
    POST   -> create a sensor from the JSON body; ``df_name`` and
              ``name`` are required. Returns the created row as JSON.
    PUT    -> update the sensor whose ``id`` is in the JSON body.
    DELETE -> delete the sensor whose ``id`` is in the query string.
    Any other method aborts with 404.
    """
    if request.method == 'GET':
        # Read sensor
        # GET /api/sensor
        sensors = g.session.query(db.models.sensor).order_by(db.models.sensor.id).all()
        return json.dumps([utils.row2dict(sensor) for sensor in sensors])
    elif request.method == 'POST':
        # Create sensor
        # POST /api/sensor
        # {name:<string>, df_name:<string>, alias:<string>, unit:<string>,
        #  icon:<string>, bg_color:<string>}
        df_name = request.json.get('df_name')
        # NOTE(review): validation failures are returned as plain strings
        # with HTTP 200, not a 4xx status -- confirm clients rely on this.
        if not df_name:
            return 'No df_name'
        if not request.json.get('name'):
            return 'No name'

        new_sensor = db.models.sensor(df_name=df_name,
                                      name=request.json.get('name'),
                                      alias=request.json.get('alias'),
                                      unit=request.json.get('unit'),
                                      icon=request.json.get('icon'),
                                      bg_color=request.json.get('bg_color'))
        g.session.add(new_sensor)
        g.session.commit()

        # Register a model for the new sensor; removes any '-O' marker
        # from the df_name first (presumably an output-suffix -- verify).
        db.inject_new_model(df_name.replace('-O', ''))

        return json.dumps(utils.row2dict(new_sensor))
    elif request.method == 'PUT':
        # Update sensor
        # PUT /api/sensor
        # {id:<int>, name:<string>, df_name:<string>, alias:<string>,
        #  unit:<string>, icon:<string>, bg_color:<string>}
        id_ = request.json.get('id')
        # NOTE(review): the entire JSON body is handed to update(), so any
        # client-supplied column is written (mass assignment). Consider
        # whitelisting the updatable fields.
        (g.session
          .query(db.models.sensor)
          .filter(db.models.sensor.id == id_)
          .update(request.json))
        g.session.commit()

        return 'ok'
    elif request.method == 'DELETE':
        # Delete sensor
        # DELETE /api/sensor?id=<id>
        id_ = request.args.get('id')
        (g.session
          .query(db.models.sensor)
          .filter(db.models.sensor.id == id_)
          .delete())
        g.session.commit()

        return 'ok'

    abort(404)
Example #2
0
def join_sites_localisation():
    """
    Left-join the basias sites table onto the localisation table and
    write the merged rows to a new dataset.
    """

    # Datasets to join.
    sites = Dataset("etl", "basias_sites_prepared")
    localisation = Dataset("etl", "basias_localisation_with_cadastre")

    # Output dataset.
    joined = Dataset("etl", "basias_sites_localisation_joined")

    # The output schema is the merge of both input schemas.
    output_dtype = merge_dtype([
        *sites.read_dtype(),
        *localisation.read_dtype(),
    ])

    joined.write_dtype(output_dtype)

    # Reflect ORM models for the join query.
    Sites = sites.reflect()
    Localisation = localisation.reflect()

    session = sites.get_session()

    records = session \
        .query(Sites, Localisation) \
        .join(Localisation,
              Sites.indice_departemental ==
              Localisation.indice_departemental,
              isouter=True) \
        .all()

    with joined.get_writer() as writer:
        for site, localisation_row in records:
            # Start from an all-None row so every output column exists.
            output_row = {column.name: None for column in output_dtype}
            output_row.update(row2dict(site))
            if localisation_row:
                output_row.update(row2dict(localisation_row))
            del output_row["id"]
            writer.write_row_dict(output_row)

    session.close()
Example #3
0
def login():
    """Verify a user's payment_id and log them in for editing.

    Returns a JSON status payload: 200 on success, 500 with a reason
    when the payment id is unknown or has not been paid.
    """
    real = db.session.query(
        User.id).filter_by(payment_id=request.form["payment_id"]).scalar()
    paid = db.session.query(
        Payment.id).filter_by(payment_id=request.form["payment_id"]).scalar()

    if real is not None and paid is not None:
        # NOTE(review): the cookie is set on a redirect response that is
        # then discarded -- the client never receives `sessiontoken`.
        # Kept as-is to preserve the JSON contract; the cookie should be
        # attached to the response that is actually returned.
        resp = redirect(url_for("edit"))
        resp.set_cookie('sessiontoken', request.form["payment_id"])
        return json.dumps({
            'status': 'Success',
            'data': 'Login succeeded'
        }), 200
    if real is None:
        return json.dumps({
            'status': 'Fail',
            'reason': "Payment ID doesn't exist!"
        }), 500
    if paid is None:
        # User exists but no payment recorded: report the owed price.
        user = row2dict(
            User.query.filter(
                # Fixed: `payment_id` was an undefined bare name here;
                # the filter must compare the User.payment_id column.
                User.payment_id == request.form["payment_id"]).first())
        return json.dumps({
            'status': 'Fail',
            'reason': "Payment ID hasn't been paid",
            'price': user["price"]
        }), 500
Example #4
0
def read_rate(CID=None, GID=None, conn_=None):
	"""Read rows from the rate table, optionally filtered by CID and/or GID.

	Args:
		CID: customer id filter, or None for all customers.
		GID: goods id filter, or None for all goods.
		conn_: existing connection to reuse; when None a new one is
			opened and closed before returning.

	Returns:
		List of row dicts (None values preserved).
	"""
	_conn = db_utils.getconnection(conn_, db='rs')

	# Build the statement incrementally.  The original handled only
	# three of the four CID/GID combinations and raised NameError
	# (unbound `stmt`) when both filters were supplied.
	stmt = sql.select([rate]).select_from(rate)
	if CID is not None:
		stmt = stmt.where(rate.c.CID == CID)
	if GID is not None:
		stmt = stmt.where(rate.c.GID == GID)
	stmt = stmt.order_by(rate.c.CID, rate.c.GID)

	rets = []
	for row in _conn.execute(stmt):
		rets.append(db_utils.row2dict(row, has_none=True))

	if conn_ is None:
		_conn.close()
	return rets
def add_or_modify_vote():
    """Create a vote, or update the existing one for (post_id, user_id).

    Expects a JSON body with ``post_id``, ``user_id`` and ``value``.
    Returns the stored vote as JSON, or a JSON error payload with a
    matching 4xx status.
    """
    try:
        data = request.get_json()
        param = {
            'post_id': data['post_id'],
            'user_id': data['user_id'],
            'value': data['value']
        }
    except (BadRequestKeyError, KeyError, TypeError):
        # Fixed: plain dict access raises KeyError (not BadRequestKeyError)
        # and get_json() may return None (TypeError on subscription);
        # neither was caught by the original handler.
        return make_response(jsonify({"code": 400, "msg": "Request is missing fields"}), 400)

    vote = Vote.query.filter_by(post_id=param['post_id'], user_id=param['user_id']).first()
    if vote:
        try:
            vote.value = int(param['value'])
        except (TypeError, ValueError):
            # Fixed: int('abc') raises ValueError, which the original did
            # not catch, and the 400 status was missing from the response
            # (it silently defaulted to 200).
            return make_response(jsonify({'code': 400, 'msg': 'Value provided is not an integer'}), 400)
    else:
        vote = Vote(**param)
    db.session.add(vote)

    try:
        db.session.commit()
    except sqlalchemy.exc.SQLAlchemyError as e:
        # Fixed: exception objects are not JSON serializable; send str(e).
        return make_response(jsonify({"code": 404, "msg": str(e)}), 404)
    return jsonify(row2dict(vote))
Example #6
0
def add_a_good(rows_, conn_=None):
	"""Insert/merge goods rows and return the newly assigned good id.

	Reads the GOODS autoincrement counter back from sqlite_sequence
	after the write, so the returned value is the id of the last
	inserted good.
	"""
	_conn = db_utils.getconnection(conn_, db='rs')

	old_rows = []
	dd = rows_diff_by_seq(seq(old_rows), seq(rows_), old_rows, rows_,
						  incl=['NAME'],
						  ind=['NAME'], rplc=False,  fillback=False)
	rows = db_utils.db_operate_dict(_conn, goods, dd, pk=['ID'])

	# get good id: read the last autoincrement value for GOODS.
	stmt = sql.select([sqlite_sequence.c.seq]).\
			select_from(sqlite_sequence).\
			where(sqlite_sequence.c.name == 'GOODS')

	row_data = None
	for row in _conn.execute(stmt):
		row_data = db_utils.row2dict(row, has_none=True)

	if conn_ is None:
		_conn.close()
	# Fixed: the original had a second, unreachable close/return block
	# after this return that referenced an undefined `goods_dict`; removed.
	return row_data['seq']
Example #7
0
def render_demo_template(*args, **argv):
    """Render a demo page for the field named in ``argv['field']``.

    Aborts with 404 when the field argument is missing or the field
    does not exist.
    """
    target_field = argv.get('field')
    if not target_field:
        abort(404)

    field_record = (g.session
                     .query(db.models.field)
                     .filter(db.models.field.name == target_field)
                     .first())
    if not field_record:
        abort(404)

    # All sensors attached to this field, in id order.
    sensor_rows = (g.session
                    .query(db.models.field_sensor)
                    .filter(db.models.field_sensor.field == field_record.id)
                    .order_by(db.models.field_sensor.id)
                    .all())
    sensors = [utils.row2dict(record) for record in sensor_rows]

    return flast_render_template(*args,
                                 fieldname=target_field,
                                 sensors=sensors,
                                 **argv)
Example #8
0
def join_cadastre():
    """Inner-join basol_cadastre with the cadastre table on
    (commune, section, numero) and attach the parcel geography."""

    # Input datasets.
    basol_cadastre = Dataset("etl", "basol_cadastre")
    cadastre = Dataset("etl", "cadastre")

    # Output dataset.
    joined = Dataset("etl", "basol_cadastre_joined")

    # Output schema = input schema + the parcel geography column.
    joined.write_dtype([
        *basol_cadastre.read_dtype(),
        Column("geog", Geometry(srid=4326)),
    ])

    BasolCadastre = basol_cadastre.reflect()
    Cadastre = cadastre.reflect()

    session = basol_cadastre.get_session()

    join_condition = (BasolCadastre.commune == Cadastre.commune) & \
                     (BasolCadastre.section == Cadastre.section) & \
                     (BasolCadastre.numero == Cadastre.numero)

    # Stream in batches of 500 to bound memory.
    records = session.query(BasolCadastre, Cadastre.geog) \
        .join(Cadastre, join_condition) \
        .yield_per(500)

    with joined.get_writer() as writer:
        for basol, geog in records:
            output_row = dict(row2dict(basol), geog=geog)
            output_row.pop("id")
            writer.write_row_dict(output_row)

    session.close()
Example #9
0
def add_parcels():
    """Left-join basol_intersected with basol_cadastre_merged and adopt
    the merged parcel geography when one exists."""

    # Input datasets.
    intersected = Dataset("etl", "basol_intersected")
    cadastre_merged = Dataset("etl", "basol_cadastre_merged")

    # Output dataset.
    with_parcels = Dataset("etl", "basol_with_parcels")

    BasolIntersected = intersected.reflect()
    BasolCadastreMerged = cadastre_merged.reflect()

    # Output schema is identical to the input schema.
    with_parcels.write_dtype(intersected.read_dtype())

    session = intersected.get_session()

    records = session.query(BasolIntersected, BasolCadastreMerged.geog) \
                     .join(BasolCadastreMerged,
                           BasolIntersected.numerobasol ==
                           BasolCadastreMerged.numerobasol,
                           isouter=True) \
                     .all()

    with with_parcels.get_writer() as writer:
        for record, geog in records:
            if geog is not None:
                # A merged parcel matched: upgrade precision and source.
                record.geog_precision = precisions.PARCEL
                record.geog_source = "cadastre"
                record.geog = geog
            writer.write_row_dict(row2dict(record))

    session.close()
def put_new_post():
    """Create a new post from a JSON body.

    Requires ``user_id``, ``url_one``, ``url_two`` and ``title``.
    Returns the created post as JSON, or a 400 error payload.
    """
    data = request.get_json()

    try:
        params = {
            'author_id': data['user_id'],
            'url_one': data['url_one'],
            'url_two': data['url_two'],
            'title': data['title']
        }
    except (KeyError, TypeError):
        # KeyError: a field is missing; TypeError: the body was not JSON
        # (get_json() returned None).  Fixed: the original bare `except:`
        # swallowed everything, including SystemExit/KeyboardInterrupt.
        return make_response(
            jsonify({
                'code': 400,
                'msg': 'Badly formed request, parameters are missing'
            }), 400)

    p = Post(**params)
    db.session.add(p)
    try:
        db.session.commit()
    except exc.IntegrityError:
        error = 'wrong parameters sent\n'
        return make_response(jsonify({"code": 400, "error": error}), 400)
    return make_response(jsonify({"code": 200, "post": row2dict(p)}), 200)
Example #11
0
def merge_geog():
    """
    Choose the best available geography per record: the Lambert II
    coordinates when they are house-number precise, otherwise a
    confident geocoding result; geog stays unset when neither applies
    (cadastre information may fill it later).
    """

    # Input dataset.
    basol_geocoded = Dataset("etl", "basol_normalized")

    # Output dataset.
    basol_geog_merged = Dataset("etl", "basol_geog_merged")

    basol_geog_merged.write_dtype([
        *basol_geocoded.read_dtype(),
        Column("geog", Geometry(srid=4326)),
        Column("geog_precision", String),
        Column("geog_source", String)
    ])

    BasolGeocoded = basol_geocoded.reflect()

    session = basol_geocoded.get_session()

    # Lambert II coordinates re-projected to WGS84.
    lambert2_expr = func.ST_Transform(
        func.ST_setSRID(
            func.ST_MakePoint(BasolGeocoded.coordxlambertii,
                              BasolGeocoded.coordylambertii), LAMBERT2), WGS84)

    # Geocoded longitude/latitude as a WGS84 point.
    geocoded_expr = func.ST_setSRID(
        func.ST_MakePoint(BasolGeocoded.geocoded_longitude,
                          BasolGeocoded.geocoded_latitude), WGS84)

    records = session.query(BasolGeocoded, lambert2_expr, geocoded_expr).all()

    with basol_geog_merged.get_writer() as writer:
        for record, lambert2_point, geocoded_point in records:

            output_row = dict(row2dict(record),
                              geog=None,
                              geog_precision=None,
                              geog_source=None)

            if record.l2e_precision == precisions.HOUSENUMBER:
                # The original Lambert II fix is house-number precise.
                output_row["geog"] = lambert2_point
                output_row["geog_precision"] = record.l2e_precision
                output_row["geog_source"] = "lambert2"
            elif (record.geocoded_result_type == precisions.HOUSENUMBER) and \
                 (record.geocoded_result_score >= 0.6):
                # Fall back to a confident house-number geocoding result.
                output_row["geog"] = geocoded_point
                output_row["geog_precision"] = record.geocoded_result_type
                output_row["geog_source"] = "geocodage"

            writer.write_row_dict(output_row)

    session.close()
Example #12
0
def userpage(user_url):
    """Render the shareable URL, checking whether the page was paid for."""
    user_id = db.session.query(User.id).filter_by(url=user_url).scalar()
    if user_id is None:
        return abort(404)

    user = row2dict(User.query.filter(User.url == user_url).first())
    paid = db.session.query(
        Payment.id).filter_by(payment_id=user["payment_id"]).scalar()
    # Never leak the payment id to the rendered page.
    user.pop('payment_id', None)

    if paid is None:
        return render_template("waiting.html", user=user)
    return render_template("user.html", user=user)
Example #13
0
def edit_page(user_url):
    """Render the user edit page for the holder of the session cookie.

    Falls back to the login page when no cookie is present, and to a
    JSON failure payload when the cookie does not match any user.
    """
    pay_id = request.cookies.get('sessiontoken')
    if pay_id is not None:
        # Fixed: the original called Query.exists().scalar() -- exists()
        # returns an EXISTS clause with no .scalar() method, so the line
        # raised at runtime.  Query the id directly, as login() does.
        exists = db.session.query(
            User.id).filter_by(payment_id=pay_id).scalar()
        if exists is not None:
            # Fixed: `payment_id` was an undefined bare name in the
            # filter; it must be the User.payment_id column.
            user = row2dict(
                User.query.filter(User.payment_id == pay_id).first())
            return render_template("edit.html", user=user)
        return json.dumps({
            'status': 'Fail',
            'reason': 'Invalid user authentication'
        }), 500
    return render_template("login.html")
Example #14
0
def read_goods(conn_=None):
	"""Return every row of the goods table as a list of dicts.

	When ``conn_`` is None a connection is opened and closed locally;
	otherwise the supplied connection is reused and left open.
	"""
	_conn = db_utils.getconnection(conn_, db='rs')
	stmt = sql.select([goods]).select_from(goods)

	results = [
		db_utils.row2dict(record, has_none=True)
		for record in _conn.execute(stmt)
	]

	if conn_ is None:
		_conn.close()
	return results
Example #15
0
def add_geog():
    """
    Join the parsed basias cadastre table with ``kelrisks.cadastre``
    to discard invalid parcels and attach the parcel geography.

    We use the cadastre table from the ``kelrisks`` schema rather than
    the ``etl`` schema: there is not enough storage on the server for
    four cadastre tables (preprod/prod x etl/kelrisks), so the ``etl``
    cadastre tables are dropped after being copied into ``kelrisks``,
    and ``etl.cadastre`` may not exist when this DAG runs.
    """

    # Input datasets.
    cadastre_parsed = Dataset("etl", "basias_cadastre_parsed")
    cadastre = Dataset("kelrisks", "cadastre")

    # Output dataset.
    with_geog = Dataset("etl", "basias_cadastre_with_geog")

    BasiasCadastreParsed = cadastre_parsed.reflect()
    Cadastre = cadastre.reflect()

    with_geog.write_dtype([
        *cadastre_parsed.read_dtype(),
        Column("geog", Geometry(srid=4326))
    ])

    session = cadastre_parsed.get_session()

    join_condition = (BasiasCadastreParsed.commune == Cadastre.commune) & \
                     (BasiasCadastreParsed.section == Cadastre.section) & \
                     (BasiasCadastreParsed.numero == Cadastre.numero)

    # Stream in batches of 500 to bound memory.
    records = session.query(BasiasCadastreParsed, Cadastre.geog) \
        .join(Cadastre, join_condition) \
        .yield_per(500)

    with with_geog.get_writer() as writer:
        for record, geog in records:
            output_row = dict(row2dict(record), geog=geog)
            del output_row["id"]
            writer.write_row_dict(output_row)

    session.close()
Example #16
0
    def GET(self):
        """Return a JSON page of the current user's word book.

        Reads ``page`` from the query string and paginates the user's
        words 10 at a time, newest first.
        """
        user = self.valid()
        if not user:
            # Fixed: the original fell straight through after redirect()
            # and then dereferenced user.userid on a falsy user.
            return self.redirect('/login')

        args = url_query_parser(web.ctx.query)
        page = int(args['page'])

        per_page_count = 10

        words_count = web.ctx.orm.query(func.count('*')).filter(
            WordsBook.userid == user.userid).scalar()

        # Ceiling division: a partial remainder needs one extra page.
        all_page = words_count // per_page_count
        if words_count % per_page_count > 0:
            all_page += 1

        def has_prev_page(curr_page):
            # Pages are 1-based; anything past page 1 has a predecessor.
            return int(curr_page) > 1

        def has_next_page(curr_page, all_page):
            return int(curr_page) < all_page

        pageval = get_pageval(page, per_page_count)

        words = web.ctx.orm.query(WordsBook) \
            .filter(WordsBook.userid == user.userid) \
            .order_by(desc(WordsBook.date)) \
            .limit(per_page_count).offset(pageval).all()
        word_list = []
        for word in words:
            word = row2dict(word)
            # Serialize the date as a unix timestamp for JSON.
            word['date'] = time.mktime(word['date'].timetuple())
            word_list.append(word)

        pager = {
            'current_page': page,
            'all_page': all_page,
            'has_prev_page': has_prev_page(page),
            'has_next_page': has_next_page(page, all_page)
        }
        return json.dumps({
            'pager': pager,
            'words': word_list
        })
def get_post_by_id(post_id: int):
    """Return the post with ``post_id`` as JSON, optionally annotated
    with the requesting user's votes; 404 when the post is unknown."""
    post = Post.query.filter_by(id=post_id).first()
    if post is None:
        # Fixed: the original called row2dict(post) before this check,
        # so a missing post crashed instead of returning the 404.
        return make_response(
            jsonify({
                'code': 404,
                'msg': 'Cannot find this post.'
            }), 404)

    json_post = row2dict(post)
    user_id = convert_value_or_none(request.args.get('user_id'))
    if user_id is not None:
        add_votes_to_post_json_list([json_post], user_id)

    return jsonify({'code': 200, 'post': json_post})
def update_vote(post_id, user_id):
    """Update the value of an existing vote for (post_id, user_id).

    Returns the updated vote as JSON, a 400 payload when the commit
    fails, or a 404 payload when the vote does not exist.
    """
    vote = Vote.query.filter_by(post_id=post_id, user_id=user_id).first()
    if not vote:
        return make_response(jsonify({"code": 404, "msg": "Cannot find this vote."}), 404)

    if 'value' in request.form:
        vote.value = request.form['value']
    try:
        db.session.commit()
    except sqlalchemy.exc.SQLAlchemyError as e:
        error = "Cannot update vote.\n"
        if app.config.get("DEBUG"):
            error += str(e)
        # Fixed: the original sent HTTP 404 with a body code of 400 (and
        # left a stray debug print in); align the status with the body.
        return make_response(jsonify({"code": 400, "msg": error}), 400)
    return jsonify(row2dict(vote))
Example #19
0
def add_commune():
    """
    Use the commune outline as a fallback geog value (precision
    MUNICIPALITY) for records whose geography was never set.

    The SqlAlchemy query is equivalent to:

    SELECT *
    FROM etl.basias_sites_localisation_joined A
    LEFT JOIN etl.commune B
    ON A.numero_insee == B.insee
    """

    # Input datasets.
    sites_joined = Dataset("etl", "basias_sites_localisation_joined")
    communes = Dataset("etl", "commune")

    # Output dataset.
    sites_with_commune = Dataset("etl", "basias_sites_with_commune")

    # Output schema is identical to the input schema.
    sites_with_commune.write_dtype(sites_joined.read_dtype())

    BasiasSites = sites_joined.reflect()
    Commune = communes.reflect()

    session = sites_joined.get_session()

    records = session.query(BasiasSites, Commune.geog) \
                     .join(Commune,
                           BasiasSites.numero_insee == Commune.insee,
                           isouter=True) \
                     .all()

    with sites_with_commune.get_writer() as writer:
        for record, commune_geog in records:
            if record.geog is None:
                # No better geography known: use the commune outline.
                record.geog = commune_geog
                record.geog_precision = precisions.MUNICIPALITY
                record.geog_source = "numero_insee"
            writer.write_row_dict(row2dict(record))

    session.close()
def add_commune():
    """
    Use the commune outline as the geog value for records whose
    precision is MUNICIPALITY.

    The SqlAlchemy query is equivalent to:

    SELECT *
    FROM etl.s3ic_with_parcelle A
    LEFT JOIN etl.commune B
    ON A.cd_insee == B.insee
    """

    # input dataset
    s3ic_with_parcelle = Dataset("etl", "s3ic_with_parcelle")
    communes = Dataset("etl", "commune")

    # output dataset
    s3ic_with_commune = Dataset("etl", "s3ic_with_commune")

    dtype = s3ic_with_parcelle.read_dtype()
    s3ic_with_commune.write_dtype(dtype)

    S3icWithParcelle = s3ic_with_parcelle.reflect()
    Commune = communes.reflect()

    session = s3ic_with_parcelle.get_session()

    q = session.query(S3icWithParcelle, Commune.geog) \
               .join(Commune,
                     S3icWithParcelle.code_insee == Commune.insee,
                     isouter=True) \
               .all()

    with s3ic_with_commune.get_writer() as writer:

        for (row, commune) in q:

            if row.geog_precision == precisions.MUNICIPALITY:
                row.geog = commune
                # Fixed: the original assigned `row.precision`, which is
                # not in the dataset schema (the sibling recipes all set
                # `geog_precision`), so the value was silently dropped.
                row.geog_precision = precisions.MUNICIPALITY
                row.geog_source = "code_insee"

            writer.write_row_dict(row2dict(row))

    session.close()
Example #21
0
def read_customer(cus_id=None, conn_=None):
	"""Read customer rows, optionally restricted to a single customer id.

	Returns a list of row dicts (None values preserved).
	"""
	_conn = db_utils.getconnection(conn_, db='rs')

	stmt = sql.select([customer]).select_from(customer)
	if cus_id:
		stmt = stmt.where(customer.c.ID == cus_id)

	results = [
		db_utils.row2dict(record, has_none=True)
		for record in _conn.execute(stmt)
	]

	if conn_ is None:
		_conn.close()
	return results
Example #22
0
def render_template(*args, **argv):
    """Render a template with the logged-in user's fields and sensors.

    Reads the username from the Flask session, collects every field the
    user can access (id, name, alias, personalised iframe URL, active
    flag, and the field's sensors) and forwards it all, plus the user's
    memo, to the underlying template renderer.
    """
    fields = []
    query = (g.session
              .query(db.models.field.id,
                     db.models.field.name,
                     db.models.field.alias,
                     db.models.field.iframe,
                     db.models.user_access.is_active)
              .select_from(db.models.field)
              .join(db.models.user_access)
              .join(db.models.user)
              .filter(db.models.user.username == session.get('username'))
              .order_by(db.models.user_access.id)
              .all())
    for id, name, alias, iframe, is_active in query:
        field_data = {
            'id': id,
            'name': name,
            'alias': alias,
            # Personalise the iframe URL for the current user.
            'iframe': iframe.replace('{username}', session.get('username')),
            # Coerce the access flag to a plain 0/1 for the template.
            'is_active': 1 & is_active,
            'sensors': []
        }
        # Attach every sensor of this field, in id order.
        # NOTE(review): one query per field (N+1); fine for small field
        # counts, consider a joined load if this grows.
        for sensor in (g.session
                        .query(db.models.field_sensor)
                        .filter(db.models.field_sensor.field == id)
                        .order_by(db.models.field_sensor.id)
                        .all()):
            field_data['sensors'].append(utils.row2dict(sensor))
        fields.append(field_data)

    user = (g.session
             .query(db.models.user)
             .filter(db.models.user.username == session.get('username'))
             .first())

    return flast_render_template(*args,
                                 fields=fields,
                                 username=session.get('username'),
                                 is_superuser=session.get('is_superuser'),
                                 memo=user.memo,
                                 **argv)
Example #23
0
def update_all_pred_rate(rows_, conn_=None):
	"""Diff the predicted (REAL == 0) rate rows against ``rows_`` and
	apply the resulting changes to the rate table.
	"""
	_conn = db_utils.getconnection(conn_, db='rs')

	# Current predicted rows, loaded as dicts.
	stmt = sql.select([rate]).\
		select_from(rate).\
		where(rate.c.REAL == 0)
	old_rows = [
		db_utils.row2dict(record, has_none=True)
		for record in _conn.execute(stmt)
	]

	# Compute the difference and write it back, keyed on (CID, GID).
	dd = rows_diff_by_seq(seq(old_rows), seq(rows_), old_rows, rows_,
						  incl=['CID','GID','RATE', 'REAL'],
						  ind=['CID','GID'], rplc=False,  fillback=False)
	applied = db_utils.db_operate_dict(_conn, rate, dd, pk=['CID','GID'])

	if conn_ is None:
		_conn.close()
	return applied
Example #24
0
def intersect():
    """
    Find the closest parcelle to the point and set it as
    new geography.

    For each record of basol_geog_merged, a correlated subquery picks
    the geography of the nearest cadastre parcel within ~0.0001 degrees
    (NULL when none is that close); matching records are snapped to the
    parcel, others are copied unchanged.
    """

    # Input dataset
    basol_geog_merged = Dataset("etl", "basol_geog_merged")
    cadastre = Dataset("etl", "cadastre")

    # Output dataset
    basol_intersected = Dataset("etl", "basol_intersected")

    # Output schema is identical to the input schema.
    dtype = basol_geog_merged.read_dtype()
    basol_intersected.write_dtype(dtype)

    Cadastre = cadastre.reflect()

    BasolGeogMerged = basol_geog_merged.reflect()
    session = basol_geog_merged.get_session()

    # Correlated scalar subquery: nearest parcel geography within the
    # distance threshold, labelled so it can be selected alongside the
    # main entity below.
    stmt = session.query(Cadastre.geog) \
                  .filter(func.st_dwithin(
                      Cadastre.geog,
                      BasolGeogMerged.geog,
                      0.0001)) \
                  .order_by(func.st_distance(
                      Cadastre.geog,
                      BasolGeogMerged.geog)) \
                  .limit(1) \
                  .label("nearest")

    q = session.query(BasolGeogMerged, stmt).all()

    with basol_intersected.get_writer() as writer:
        for (row, cadastre_geog) in q:
            # Snap the record to the nearest parcel when one was found.
            if cadastre_geog is not None:
                row.geog = cadastre_geog
            writer.write_row_dict(row2dict(row))

    session.close()
Example #25
0
def join_localisation_cadastre():
    """
    Left-join the localisation table with the merged cadastre table and
    adopt the cadastre geography (parcel precision) when available.
    """

    # Input datasets
    basias_localisation_intersected = Dataset(
        "etl", "basias_localisation_intersected")
    basias_cadastre_merged = Dataset("etl", "basias_cadastre_merged")

    # Output dataset
    basias_localisation_with_cadastre = Dataset(
        "etl", "basias_localisation_with_cadastre")

    # Output schema is identical to the input schema.
    basias_localisation_with_cadastre.write_dtype(
        basias_localisation_intersected.read_dtype())

    BasiasLocalisationIntersected = basias_localisation_intersected.reflect()
    BasiasCadastreMerged = basias_cadastre_merged.reflect()

    session = basias_localisation_intersected.get_session()

    cond = BasiasCadastreMerged.indice_departemental \
        == BasiasLocalisationIntersected.indice_departemental

    q = session.query(
        BasiasLocalisationIntersected,
        BasiasCadastreMerged.geog) \
        .join(BasiasCadastreMerged, cond, isouter=True) \
        .all()

    with basias_localisation_with_cadastre.get_writer() as writer:
        for (row, cadastre_geog) in q:
            if cadastre_geog is not None:
                # replace geog with cadastre geog
                row.geog = cadastre_geog
                row.geog_precision = precisions.PARCEL
                row.geog_source = "cadastre"
            writer.write_row_dict(row2dict(row))

    # Fixed: every sibling recipe closes its session; this one leaked it.
    session.close()
Example #26
0
def add_communes():
    """
    Fall back to the commune outline for every record whose geog was
    not already set with a better precision.
    """

    # Input datasets.
    with_parcels = Dataset("etl", "basol_with_parcels")
    communes = Dataset("etl", "commune")

    # Output dataset.
    with_communes = Dataset("etl", "basol_with_commune")

    # Output schema is identical to the input schema.
    with_communes.write_dtype(with_parcels.read_dtype())

    BasolWithParcels = with_parcels.reflect()
    Commune = communes.reflect()

    session = with_parcels.get_session()

    records = session.query(BasolWithParcels, Commune.geog) \
                     .join(Commune,
                           BasolWithParcels.code_insee == Commune.insee,
                           isouter=True) \
                     .all()

    with with_communes.get_writer() as writer:
        for record, commune_geog in records:
            if record.geog is None:
                # No parcel-level geography: use the commune outline.
                record.geog = commune_geog
                record.geog_precision = precisions.MUNICIPALITY
                record.geog_source = "code_insee"
            writer.write_row_dict(row2dict(record))

    session.close()
Example #27
0
def post_payment():
    """Process form input for URL registration.

    Rejects already-registered URLs (returning the registered address);
    otherwise creates the user with a fresh payment id and responds with
    the id and the price owed.
    """
    content = request.form
    # True when the requested URL is already taken by another user.
    exists = db.session.query(
        User.id).filter_by(url=content["url"]).scalar() is not None
    if exists:
        row = User.query.filter(User.url == content["url"]).first()
        result = row2dict(row)
        return json.dumps({
            'status': 'Fail',
            'reason': 'This URL has already been registered',
            'data': result['address']
        }), 500
    else:
        pay_id = get_payment_id()
        try:
            user = User(address=content["address"],
                        payment_id=pay_id,
                        url=content["url"],
                        message=content["message"],
                        turtlehash=content["hash"],
                        price=get_price(content["url"]))
            db.session.add(user)
            db.session.commit()
        except Exception as e:
            # Deliberately broad: any failure (missing form field, DB
            # error) is logged with its traceback and reported to the
            # client as a generic 500.
            app.logger.info(traceback.format_exc())
            return json.dumps({
                'status':
                'Fail',
                'reason':
                'Your transaction could not be processed'
            }), 500

        return json.dumps({
            'status': 'Success',
            'payment_id': pay_id,
            'price': get_price(content["url"])
        }), 200
def get_all_post():
    """Return posts as JSON, newest first, optionally paginated via the
    ``page`` and ``post_per_page`` query args."""
    page_size = convert_value_or_none(request.args.get('post_per_page'))
    page = convert_value_or_none(request.args.get('page'))
    user_id = convert_value_or_none(request.args.get('user_id'))

    full_json_posts = [row2dict(p) for p in Post.query.order_by(desc(Post.id))]

    if page is None or page_size is None:
        # No pagination requested: return everything.
        add_votes_to_post_json_list(full_json_posts, user_id)
        return jsonify({'code': 200, 'posts': full_json_posts})

    try:
        # Chunk the full list into pages of page_size, then pick one.
        pages = [
            full_json_posts[start:start + page_size]
            for start in range(0, len(full_json_posts), page_size)
        ]
        selected = pages[page]
        add_votes_to_post_json_list(selected, user_id)
        return jsonify({'code': 200, 'posts': selected})
    except IndexError:
        # Page number past the end: tell the client there is no more.
        return jsonify({'code': 200, 'end': True})
Example #29
0
def add_a_customer(rows_, cus_id=None, conn_=None):
	"""Insert or update a customer and return its id.

	When ``cus_id`` is None a new customer is created and the fresh
	autoincrement id is read back from sqlite_sequence; otherwise the
	existing rows are diffed against ``rows_`` and ``cus_id`` returned.
	"""
	_conn = db_utils.getconnection(conn_, db='rs')

	if cus_id:
		old_rows = read_customer(cus_id=cus_id,conn_=_conn)
	else:
		old_rows = []
	dd = rows_diff_by_seq(seq(old_rows), seq(rows_), old_rows, rows_,
						  incl=['ID','NAME'],
						  ind=['ID'], rplc=False,  fillback=False)
	rows = db_utils.db_operate_dict(_conn, customer, dd, pk=['ID'])

	# get cus id: read the last autoincrement value for CUSTOMER.
	stmt = sql.select([sqlite_sequence.c.seq]).\
			select_from(sqlite_sequence).\
			where(sqlite_sequence.c.name == 'CUSTOMER')

	row_data = None
	for row in _conn.execute(stmt):
		row_data = db_utils.row2dict(row, has_none=True)

	if conn_ is None:
		_conn.close()
	# PEP 8 fix: identity comparison with None uses `is`, not `==`.
	return row_data['seq'] if cus_id is None else cus_id
Example #30
0
def stmt_to_dct(stmt_, conn_):
	"""Execute ``stmt_`` on ``conn_`` and return all rows as dicts
	(None values preserved)."""
	return [
		db_utils.row2dict(record, has_none=True)
		for record in conn_.execute(stmt_)
	]
Example #31
0
def api_field():
    """CRUD endpoint for fields and their attached sensors.

    GET    /api/field          -> JSON list of fields, each with a 'sensors' list
    POST   /api/field          -> create a field plus its field_sensor rows
    PUT    /api/field          -> update field attrs; replace its sensor list
    DELETE /api/field?id=<id>  -> delete the field and all dependent rows
    """
    if request.method == 'GET':
        # Read fields
        # GET /api/field
        fields = []
        query_fields = (g.session
                         .query(db.models.field)
                         .order_by(db.models.field.id)
                         .all())

        for field in query_fields:
            temp_field = utils.row2dict(field)
            # Join field_sensor -> sensor to pick up the sensor's name.
            query_field_sensor = (g.session
                                   .query(db.models.sensor.name,
                                          db.models.field_sensor.sensor,
                                          db.models.field_sensor.df_name,
                                          db.models.field_sensor.alias,
                                          db.models.field_sensor.unit,
                                          db.models.field_sensor.icon,
                                          db.models.field_sensor.bg_color,
                                          db.models.field_sensor.alert_min,
                                          db.models.field_sensor.alert_max)
                                   .select_from(db.models.field_sensor)
                                   .join(db.models.sensor)
                                   .filter(db.models.field_sensor.field == field.id)
                                   .order_by(db.models.field_sensor.id)
                                   .all())
            temp_field['sensors'] = [
                {
                    'name': sensor.name,
                    'sensor': sensor.sensor,
                    'df_name': sensor.df_name,
                    'alias': sensor.alias,
                    'unit': sensor.unit,
                    'icon': sensor.icon,
                    'bg_color': sensor.bg_color,
                    'alert_min': sensor.alert_min,
                    'alert_max': sensor.alert_max,
                }
                for sensor in query_field_sensor
            ]
            fields.append(temp_field)

        return json.dumps(fields)
    elif request.method == 'POST':
        # Create field
        # POST /api/field
        # {name:<string>, alias:<string>, sensors: [<sensor>, ...]}
        if not request.json.get('name'):
            return 'No field name'

        new_field = db.models.field(name=request.json.get('name'),
                                    alias=request.json.get('alias'),
                                    iframe=request.json.get('iframe', ''))
        g.session.add(new_field)
        # Commit now so new_field.id is assigned before the sensor rows.
        g.session.commit()

        for sensor in request.json.get('sensors', []):
            new_sensor = db.models.field_sensor(
                field=new_field.id,
                sensor=sensor.get('sensor'),
                df_name=sensor.get('df_name'),
                alias=sensor.get('alias'),
                unit=sensor.get('unit'),
                icon=sensor.get('icon'),
                bg_color=sensor.get('bg_color'),
                alert_min=sensor.get('alert_min'),
                alert_max=sensor.get('alert_max'))
            g.session.add(new_sensor)
        # One commit for all sensor rows (was one commit per iteration).
        g.session.commit()

        return json.dumps(utils.row2dict(new_field))
    elif request.method == 'PUT':
        # Update field
        # PUT /api/field
        # {name:<string>, alias:<string>, sensors: [<sensor>, ...]}
        # (removed leftover debug print of request.json)
        id_ = request.json.get('id')
        name = request.json.get('name')
        alias = request.json.get('alias')
        iframe = request.json.get('iframe', '')
        sensors = request.json.get('sensors', [])

        (g.session
          .query(db.models.field)
          .filter(db.models.field.id == id_)
          .update({'name': name,
                   'alias': alias,
                   'iframe': iframe}))
        # Replace the sensor list wholesale: delete old rows, insert new ones.
        (g.session
          .query(db.models.field_sensor)
          .filter(db.models.field_sensor.field == id_)
          .delete())
        for sensor in sensors:
            df_name = sensor.get('df_name')
            db.inject_new_model(df_name.replace('-O', ''))

            new_sensor = db.models.field_sensor(
                field=id_,
                sensor=sensor.get('sensor'),
                df_name=sensor.get('df_name'),
                alias=sensor.get('alias'),
                unit=sensor.get('unit'),
                icon=sensor.get('icon'),
                bg_color=sensor.get('bg_color'),
                alert_min=sensor.get('alert_min'),
                alert_max=sensor.get('alert_max'))
            # (removed redundant re-assignment of .field and .id — `field`
            # is already passed to the constructor and `id` autoincrements)
            g.session.add(new_sensor)
        # One commit covers the update, the delete, and all inserts
        # (was one commit per sensor plus a trailing commit).
        g.session.commit()

        return 'ok'
    elif request.method == 'DELETE':
        # Delete field
        # DELETE /api/field?id=<id>
        id_ = request.args.get('id')

        # Purge the field's time-series data: every model class with a
        # `timestamp` attribute holds rows keyed by field id.
        for attr in db.models.__dict__.values():
            if inspect.isclass(attr) and hasattr(attr, 'timestamp'):
                (g.session
                  .query(attr)
                  .filter(attr.field == id_)
                  .delete())
        # Then the dependent rows, then the field itself.
        (g.session
          .query(db.models.field_sensor)
          .filter(db.models.field_sensor.field == id_)
          .delete())
        (g.session
          .query(db.models.user_access)
          .filter(db.models.user_access.field == id_)
          .delete())
        (g.session
          .query(db.models.field)
          .filter(db.models.field.id == id_)
          .delete())
        g.session.commit()

        return 'ok'

    abort(404)
Example #32
0
def api_user():
    """CRUD endpoint for users and their field-access rows.

    GET    /api/user[?id=<id>&username=<username>]
    POST   /api/user   {username, password, is_superuser, access, active}
    PUT    /api/user   {id, username, is_superuser, access, active}
    DELETE /api/user?id=<id>
    """
    if request.method == 'GET':
        # Read user
        # GET /api/user[?id=<id>&username=<username>]
        users = []

        # Any query-string key matching a user-model attribute becomes an
        # equality filter (e.g. ?id=3&username=bob).
        query = g.session.query(db.models.user)
        for key, value in request.args.items():
            attr = getattr(db.models.user, key, None)
            if attr:
                query = query.filter(attr == value)
        query = query.order_by(db.models.user.id).all()

        for user in query:
            # Fields this user may access, joined through user_access.
            query_access = (g.session
                             .query(db.models.field, db.models.user_access)
                             .select_from(db.models.user_access)
                             .join(db.models.field)
                             .filter(db.models.user_access.user == user.id)
                             .order_by(db.models.user_access.id)
                             .all())
            access = []
            active = None
            for field, acc in query_access:
                access.append(utils.row2dict(field))
                # `active` ends up as the last access row flagged is_active.
                if acc.is_active:
                    active = acc.field

            # Password hash is deliberately omitted from the response.
            users.append({
                'id': user.id,
                'username': user.username,
                'is_superuser': user.is_superuser,
                'access': access,
                'active': active
            })
        return json.dumps(users)
    elif request.method == 'POST':
        # Create user
        # POST /api/user
        # {username:<username>, password:<password>, is_superuser:<is_superuser>}
        username = request.json.get('username')
        # NOTE(review): a missing 'password' key makes
        # generate_password_hash raise — confirm callers always send it.
        password = generate_password_hash(request.json.get('password'))
        is_superuser = request.json.get('is_superuser')
        access = request.json.get('access', [])
        active = request.json.get('active')

        new_user = db.models.user(username=username,
                                  password=password,
                                  is_superuser=is_superuser)
        g.session.add(new_user)
        # Commit first so new_user.id exists for the access rows below.
        g.session.commit()

        for field in access:
            new_access = db.models.user_access(user=new_user.id, field=field.get('id'))
            if field.get('id') == active:
                new_access.is_active = True
            g.session.add(new_access)
        g.session.commit()

        return json.dumps(utils.row2dict(new_user))
    elif request.method == 'PUT':
        # Update user
        # PUT /api/user
        # {id:<id>, username:<username>, is_superuser:<is_superuser>}
        # (password is not updatable through this endpoint)
        id_ = request.json.get('id')
        username = request.json.get('username')
        is_superuser = request.json.get('is_superuser')
        access = request.json.get('access', [])
        active = request.json.get('active')

        (g.session
          .query(db.models.user)
          .filter(db.models.user.id == id_)
          .update({'username': username,
                   'is_superuser': is_superuser}))
        # Replace the access list wholesale: delete old rows, insert new.
        (g.session
          .query(db.models.user_access)
          .filter(db.models.user_access.user == id_)
          .delete())
        for field in access:
            new_access = db.models.user_access(user=id_, field=field.get('id'))
            if field.get('id') == active:
                new_access.is_active = True
            g.session.add(new_access)
        g.session.commit()

        return 'ok'
    elif request.method == 'DELETE':
        # Delete user
        # DELETE /api/user?id=<id>
        id_ = request.args.get('id')
        # Remove dependent access rows before the user itself.
        (g.session
          .query(db.models.user_access)
          .filter(db.models.user_access.user == id_)
          .delete())
        (g.session
          .query(db.models.user)
          .filter(db.models.user.id == id_)
          .delete())
        g.session.commit()

        return 'ok'

    # Any other HTTP method -> 404.
    abort(404)