Example #1
def get_houses(context, request):
    box = leaflet_box_to_WKT_polygon(request.params.getall('box')[0])

    session = DBSession()
    houses_in_box = session.query(House) \
        .filter(House.geo.within(box)) \
        .join(House.policeman) \
        .all()

    houses = []
    policemen = dict()
    for house in houses_in_box:
        geometry = loads(str(house.geo.geom_wkb))
        feature = Feature(
            id=house.osm_id,
            geometry=geometry,
            properties={
                'address': house.address,
                'pm_id': house.policeman_id
            }
        )
        houses.append(feature)

        policeman = house.policeman
        if policeman.id not in policemen:
            photo_url_parsed = policeman.photo_url.split('/')
            photo_name = photo_url_parsed[-2] + photo_url_parsed[-1].replace('img', '_file')
            policemen[policeman.id] = {
                'id': policeman.id,
                'name': policeman.name,
                'type': policeman.type,
                'rank': policeman.rank,
                'phone': policeman.phone,
                'url': policeman.url,
                'photo_url': request.route_url('home') + 'static/images/' + photo_name,
                'color': policeman.color
            }

    subdivisions_in_box = session.query(Subdivision) \
        .filter(Subdivision.geo.within(box)) \
        .all()

    subdivisions = {}
    for subdivision_in_box in subdivisions_in_box:
        subdivision = row2dict(subdivision_in_box, ['id', 'name', 'phone', 'address', 'hours', 'url'])
        subdivision['geo'] = loads(str(subdivision_in_box.geo.geom_wkb))
        subdivisions[subdivision['id']] = subdivision

    result = {
        'houses': FeatureCollection(houses),
        'policemen': policemen,
        'subdivisions': subdivisions
    }

    return Response(dumps(result))
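All of these examples call a row2dict helper that is not shown on this page. Below is a minimal sketch of what such a helper typically looks like for SQLAlchemy models, assuming (as Example #1 suggests) that the optional second argument is a whitelist of column names; the real project's signature may differ.

def row2dict(row, fields=None):
    # Hypothetical helper: convert a SQLAlchemy model instance into a plain dict.
    # `fields`, if given, acts as a whitelist of column names (see Example #1).
    result = {}
    for column in row.__table__.columns:
        if fields is None or column.name in fields:
            result[column.name] = getattr(row, column.name)
    return result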
Example #2
def test_users():
    users = [User(email=f"{i}@loadtest.com", api_key=generate_api_key())
             for i in range(NUMBER_OF_TEST_USERS)]
    for user in users:
        db_session.add(user)
    db_session.commit()

    yield [row2dict(user) for user in users]

    for user in users:
        db_session.delete(user)
    db_session.commit()
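The test_users generator above is presumably a pytest fixture (Example #11 receives it as a test argument); the decorator is cropped from the excerpt. A minimal sketch of how it would likely be registered and consumed follows, where the decorator and the consuming test are assumptions.

import pytest

@pytest.fixture
def test_users():
    ...  # body as in the excerpt above: create users, yield dicts, then clean up

def test_api_keys_present(test_users):
    # each yielded element is a plain dict produced by row2dict
    assert all('api_key' in user for user in test_users)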
Example #3
def complete_account_registration(verify_hash: str,
                                  Authorize: AuthJWT = Depends(),
                                  db: Session = Depends(get_db)):
    settings = db.query(AccountSettings).filter_by(
        verify_account_hash=verify_hash).first()
    if settings is not None:
        acct_uuid = settings.uuid
        settings.verify_account_hash = None
        settings.cancel_registration_hash = None
        db.merge(settings)
        db.commit()
        account = db.query(Account).filter_by(uuid=acct_uuid).first()
        if account is not None:
            account.is_verified = True
            db.merge(account)
            db.commit()
            create_access_and_refresh_tokens(str(acct_uuid), Authorize)
            user = row2dict(account)
            user.update(row2dict(settings))
            return user
    else:
        raise HTTPException(status_code=400,
                            detail=f"invalid hash or account does not exist")
Example #4
def get_houses(context, request):
    box = leaflet_box_to_WKT_polygon(request.params.getall("box")[0])

    session = DBSession()
    houses_in_box = session.query(House).filter(House.geo.within(box)).join(House.policeman).all()

    houses = []
    policemen = dict()
    for house in houses_in_box:
        geometry = loads(str(house.geo.geom_wkb))
        feature = Feature(
            id=house.osm_id, geometry=geometry, properties={"address": house.address, "pm_id": house.policeman_id}
        )
        houses.append(feature)

        policeman = house.policeman
        if policeman.id not in policemen:
            photo_url_parsed = policeman.photo_url.split("/")
            photo_name = photo_url_parsed[-2] + photo_url_parsed[-1].replace("img", "_file")
            policemen[policeman.id] = {
                "id": policeman.id,
                "name": policeman.name,
                "type": policeman.type,
                "rank": policeman.rank,
                "phone": policeman.phone,
                "url": policeman.url,
                "photo_url": request.route_url("home") + "static/images/" + photo_name,
                "color": policeman.color,
            }

    subdivisions_in_box = session.query(Subdivision).filter(Subdivision.geo.within(box)).all()

    subdivisions = {}
    for subdivision_in_box in subdivisions_in_box:
        subdivision = row2dict(subdivision_in_box, ["id", "name", "phone", "address", "hours", "url"])
        subdivision["geo"] = loads(str(subdivision_in_box.geo.geom_wkb))
        subdivisions[subdivision["id"]] = subdivision

    result = {"houses": FeatureCollection(houses), "policemen": policemen, "subdivisions": subdivisions}

    return Response(dumps(result))
Example #5
def get_job_logs(job_id: int, job_run_id: int):
    job_run_logs = job_service.get_logs_for_run(job_id, session['profile']['internal_user_id'], job_run_id)
    logs = [row2dict(log_record) for log_record in job_run_logs]
    return jsonify(logs), 200
Example #6
def get_job(job_id):
    job = job_service.get(job_id, session['profile']['internal_user_id'])
    return jsonify(row2dict(job)), 200
Example #7
def get_jobs():
    user_id = session['profile']['internal_user_id']
    jobs = workspace_service.get_default_workspace(user_id).jobs
    rv = [row2dict(job) for job in jobs]
    return jsonify(rv), 200
Example #8
def get_job_parameters(job_id: int):
    parameters = job_service.get_parameters_for_job(job_id, session['profile']['internal_user_id'])
    parameters = [row2dict(parameter) for parameter in parameters]
    return jsonify(parameters), 200
Example #9
def get_template(template_id):
    job = marketplace_service.get_template(template_id)
    return jsonify(row2dict(job)), 200
Example #10
def get_templates():
    templates = marketplace_service.get_templates()
    rv = [row2dict(template) for template in templates]
    return jsonify(rv), 200
Example #11
def test_load(test_users):
    # We should be able to find this value in logs later
    pi_digits = str(pi())

    # Create test jobs using the API
    with tarfile.open(PACKAGE_NAME, "w:gz") as tar:
        tar.add("load_test/test_seamless_project", ".")
    created_job_ids_by_user_id = defaultdict(list)
    created_job_names_by_user_id = defaultdict(list)
    for user in test_users:
        for i in range(JOBS_PER_USER):
            job_name = f"user_{user['id']}_job_{i}"
            resp = requests.put(
                PUBLISH_URL,
                params={
                    "name": job_name,
                    "schedule": TEST_SCHEDULE
                },
                headers={'Authorization': user['api_key']},
                files={'seamless_project': open(PACKAGE_NAME, 'rb')})
            resp.raise_for_status()
            created_job_ids_by_user_id[user['id']].append(
                resp.json()['job_id'])
            created_job_names_by_user_id[user['id']].append(job_name)

    # Wait until they are all executed
    sleep(WAIT_FOR_EXECUTION_SECONDS)

    try:
        for user in test_users:
            for job_id in created_job_ids_by_user_id[user['id']]:
                job = db_session.query(Job).get(job_id)

                # There may be more runs because of the buffer, but we only check first TEST_RUNS
                job_runs = sorted(list(job.runs),
                                  key=lambda x: x.created_at)[:TEST_RUNS]
                print([row2dict(run) for run in job_runs])

                # Each job should be executed at least TEST_RUNS times,
                # but no more than TEST_RUNS + 2 because of WAIT_FOR_EXECUTION_SECONDS
                assert TEST_RUNS <= len(job_runs) <= TEST_RUNS + 2

                # Make sure runs are 1 minute +/- 5 seconds from each other
                timestamps = [r.created_at for r in job_runs]
                for i in range(len(timestamps) - 1):
                    assert abs((timestamps[i + 1] -
                                timestamps[i]).total_seconds() - 60) < 5

                for run in job_runs:
                    logs = list(run.logs)
                    print([row2dict(log_record) for log_record in logs])

                    # All executions should be successful
                    assert run.status == JobRunStatus.Ok.value

                    # Logs recorded for each job should be exactly like this
                    assert logs[0].message == 'SciPy version: 1.5.1\n'
                    assert logs[1].message == 'Executing...\n'
                    index = 2
                    for i in range(0, PI_DIGITS, MAX_LOGS_ROW_LENGTH):
                        if i + MAX_LOGS_ROW_LENGTH < len(pi_digits):
                            pi_digits_in_logs = pi_digits[i:i + MAX_LOGS_ROW_LENGTH]
                        else:
                            pi_digits_in_logs = f'{pi_digits[i:]}\n'
                        assert logs[index].message == pi_digits_in_logs
                        index += 1
    finally:
        # Remove all test jobs using the API
        for user in test_users:
            for job_name in created_job_names_by_user_id[user['id']]:
                resp = requests.delete(
                    f"{DELETE_URL}{job_name}",
                    headers={'Authorization': user['api_key']})
                resp.raise_for_status()