def tasks_page(task_id):
    """Render the status page for the task identified by *task_id*.

    Returns the 404 "task not found" page for malformed ids or unknown
    tasks; otherwise renders index.html with a status that reflects the
    celery task state and whether the result file is still on disk.
    """
    # Only canonical lowercase UUID strings are valid task ids.
    uuid_pattern = (r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-'
                    r'[0-9a-f]{4}-[0-9a-f]{12}$')
    if re.match(uuid_pattern, task_id) is None:
        return render_template("tasknotfound.html"), 404
    try:
        task = db_session.query(Task).filter(Task.id == task_id).one()
        # Fetch display fields plus PostGIS-derived geometry info in one query.
        task_info = db_session.query(
            Task.id,
            Task.log_actual_point_count,
            Task.log_execution_time,
            func.ST_AsText(Task.geom).label('wkt'),
            func.ST_XMin(Task.geom).label('minx'),
            func.ST_YMin(Task.geom).label('miny'),
            func.ST_XMax(Task.geom).label('maxx'),
            func.ST_YMax(Task.geom).label('maxy'),
            (Task.log_actual_point_count / func.ST_Area(Task.geom)).label('density'),
        ).filter(Task.id == task_id).one()
        task_dict = task_info._asdict()
        task_dict['classes'] = task.get_classnames()
    except NoResultFound:
        return render_template("tasknotfound.html"), 404
    status = task.get_status()
    datasets = Dataset.query.all()
    dataset_i = datasets.index(task.dataset)
    if status == 'SUCCESS':
        result_path = app.config['RESULTS_FOLDER'] + task_id + '.laz'
        # Results are purged periodically, so a finished task may have
        # no file any more.
        if os.path.exists(result_path):
            return render_template("index.html", active_dataset_i=dataset_i,
                                   datasets=datasets, task=task_dict,
                                   status='okay',
                                   download_url=task.get_relative_url())
        return render_template("index.html", active_dataset_i=dataset_i,
                               datasets=datasets, task=task_dict,
                               status='deleted')
    if status in ('PENDING', 'RETRY'):
        return render_template("index.html", active_dataset_i=dataset_i,
                               datasets=datasets, task=task_dict,
                               status='pending', refresh=True)
    return render_template("index.html", active_dataset_i=dataset_i,
                           datasets=datasets, task=task_dict,
                           status='failure')
def new_task(left, bottom, right, top, ahn2_class):
    """Celery task body: merge all tiles of *ahn2_class* intersecting the
    bounding box into a single LAZ result file, then record timing and
    point-count statistics on the Task row.

    Raises if the external lasmerge executable fails, which marks the
    celery task as FAILURE.
    """
    # 'ug' is the combined class; expand to the alternation the DB match()
    # filter expects.
    if ahn2_class == 'ug':
        ahn2_class = 'u|g'
    ewkt = get_ewkt_from_bounds(left, bottom, right, top)
    rows = db_session.query(Tile.path).filter(
        Tile.ahn2_class.match(ahn2_class)).filter(
        Tile.geom.intersects(ewkt)).all()
    filenames = [row[0] for row in rows]
    output_laz = app.config['RESULTS_FOLDER'] + str(new_task.request.id) + '.laz'
    # This raises if something goes wrong while calling the lasmerge
    # executable; let it propagate.
    t0 = time.time()
    lastools.lasmerge(filenames, left, bottom, right, top, output_laz)
    t1 = time.time()
    t = db_session.query(Task).filter(
        Task.id == str(new_task.request.id)).one()
    try:
        t.send_email()
    except Exception:
        # Email delivery is best-effort and must not fail the merge.
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        pass
    infotxt = lastools.lasinfotxt(output_laz)
    info = tile_io.read_lasinfotxt(infotxt)
    t.log_execution_time = t1 - t0
    t.log_actual_point_count = info['pointcount']
    db_session.commit()
def tasks_page(task_id):
    """Show the status page for the task whose UUID is *task_id*.

    Malformed ids and unknown tasks get the 404 page; otherwise
    index.html is rendered with a status keyed off the celery state and
    the presence of the result file on disk.
    """
    if re.match(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-'
                r'[0-9a-f]{4}-[0-9a-f]{12}$', task_id) is None:
        return render_template("tasknotfound.html"), 404
    try:
        task = db_session.query(Task).filter(Task.id == task_id).one()
        # One query for the scalar fields plus PostGIS geometry summaries.
        task_info = db_session.query(
            Task.id,
            Task.log_actual_point_count,
            Task.log_execution_time,
            func.ST_AsText(Task.geom).label('wkt'),
            func.ST_XMin(Task.geom).label('minx'),
            func.ST_YMin(Task.geom).label('miny'),
            func.ST_XMax(Task.geom).label('maxx'),
            func.ST_YMax(Task.geom).label('maxy'),
            (Task.log_actual_point_count / func.ST_Area(Task.geom)).label('density'),
        ).filter(Task.id == task_id).one()
        task_dict = task_info._asdict()
        task_dict['classes'] = task.get_classnames()
    except NoResultFound:
        return render_template("tasknotfound.html"), 404
    status = task.get_status()
    datasets = Dataset.query.all()
    dataset_i = datasets.index(task.dataset)
    # Keyword arguments shared by every index.html render below.
    common = dict(active_dataset_i=dataset_i, datasets=datasets,
                  task=task_dict)
    if status == 'SUCCESS':
        result_file = app.config['RESULTS_FOLDER'] + task_id + '.laz'
        # Old results get cleaned up, so success does not imply the file
        # still exists.
        if os.path.exists(result_file):
            return render_template("index.html", status='okay',
                                   download_url=task.get_relative_url(),
                                   **common)
        return render_template("index.html", status='deleted', **common)
    if status in ('PENDING', 'RETRY'):
        return render_template("index.html", status='pending',
                               refresh=True, **common)
    return render_template("index.html", status='failure', **common)
def submitnewtask():
    """Validate request parameters and enqueue a new extraction task.

    Expects query-string args: left/bottom/right/top (floats), email,
    classification. On validation failure returns a JSON body with a
    'wronginput' message; on success stores a Task row and returns the
    task page URL under 'result'.
    """
    left = request.args.get('left', type=float)
    bottom = request.args.get('bottom', type=float)
    right = request.args.get('right', type=float)
    top = request.args.get('top', type=float)
    email = request.args.get('email', type=str)
    classification = request.args.get('classification', type=str)
    # request.args.get returns None for missing/unparsable values; reject
    # those up front instead of crashing in re.match / bounds handling.
    if None in (left, bottom, right, top):
        return jsonify(wronginput="Invalid selection bounds")
    # email validation
    if email is None or not re.match(r"[^@]+@[^@]+\.[^@]+", email):
        return jsonify(wronginput="Invalid email address")
    # classification validation: 1-2 word chars starting with 'u' or 'g'
    if classification is None or not re.match(r"^(?=\w{1,2}$)([ug]).*",
                                              classification):
        return jsonify(wronginput="Wrong AHN2 classification")
    # selection bounds validation
    ewkt = get_ewkt_from_bounds(left, bottom, right, top)
    if 0 == db_session.query(Tile).filter(
            Tile.geom.intersects(ewkt)).count():
        return jsonify(wronginput="Selection is empty")
    # Size cap applies only to untrusted clients.
    if (request.remote_addr not in app.config['TRUSTED_IP_ADDRESSES']
            and get_point_count_estimate_from_ewkt(ewkt)
            > app.config['MAX_POINT_QUERY_SIZE']):
        return jsonify(wronginput="At this time we don't accept requests larger than {} points. Draw a smaller selection to continue.".format(format_big_number(app.config['MAX_POINT_QUERY_SIZE'])))
    # new celery task
    result = new_task.apply_async((left, bottom, right, top, classification))
    # store task parameters in db (reuse the already-built ewkt)
    task = Task(id=result.id, ahn2_class=classification, emailto=email,
                geom=ewkt,
                time_stamp=datetime.datetime.now(),
                ip_address=request.remote_addr)
    db_session.add(task)
    db_session.commit()
    taskurl = url_for('tasks_page', task_id=result.id)
    return jsonify(result=taskurl)
def merge_tiles_from_taskfile(taskfile):
    """Re-run a tile merge described by a task file on disk.

    The task file has one "key: value" line each, in order: left,
    bottom, right, top, then the AHN2 classification. The merged output
    is written next to the task file with a 'laz' extension.
    """
    with open(taskfile) as f:
        left = float(f.readline().split(':')[-1])
        bottom = float(f.readline().split(':')[-1])
        right = float(f.readline().split(':')[-1])
        top = float(f.readline().split(':')[-1])
        ahn2_class = f.readline().split(':')[-1].strip()
    # File is parsed; close it before the (possibly slow) DB query and merge.
    if ahn2_class == 'ug':
        ahn2_class = 'u|g'
    ewkt = get_ewkt_from_bounds(left, bottom, right, top)
    rows = db_session.query(Tile.path).filter(
        Tile.ahn2_class.match(ahn2_class)).filter(
        Tile.geom.intersects(ewkt)).all()
    # Renamed from `f`, which shadowed the file handle above.
    filenames = [row[0] for row in rows]
    # Replace the 3-character extension of the task file with 'laz'.
    lasmerge(filenames, left, bottom, right, top, taskfile[:-3] + 'laz')
def submitnewtask():
    """Validate request parameters and enqueue a new extraction task.

    Expects query-string args: left/bottom/right/top (floats), email,
    classification (comma-separated class numbers) and dataset_id. On
    validation failure returns JSON with a 'wronginput' message; on
    success stores a Task row and returns the task page URL in 'result'.
    """
    left = request.args.get('left', type=float)
    bottom = request.args.get('bottom', type=float)
    right = request.args.get('right', type=float)
    top = request.args.get('top', type=float)
    email = request.args.get('email', type=str)
    classification = request.args.get('classification', type=str)
    dataset_id = request.args.get('dataset_id', type=int)
    # request.args.get returns None for missing/unparsable values; reject
    # those up front instead of crashing in re.match / bounds handling.
    if None in (left, bottom, right, top):
        return jsonify(wronginput="Invalid selection bounds")
    # email validation
    if email is None or not re.match(r"[^@]+@[^@]+\.[^@]+", email):
        return jsonify(wronginput="Invalid email address")
    # classification validation: comma-separated list of 1-99
    if classification is None or not re.match(
            r"^([1-9][0-9]?)(,[1-9][0-9]?)*$", classification):
        return jsonify(wronginput="Invalid classification string")
    classification = classification.split(',')
    # dataset validation
    dataset = Dataset.query.get(dataset_id)
    if dataset is None:
        return jsonify(wronginput="Invalid dataset name")
    # selection bounds validation
    ewkt = get_ewkt_from_bounds(left, bottom, right, top)
    if 0 == db_session.query(Tile).filter(
            Tile.geom.intersects(ewkt)).count():
        return jsonify(wronginput="Selection is empty")
    # Size cap applies only to untrusted clients.
    if (request.remote_addr not in app.config['TRUSTED_IP_ADDRESSES']
            and get_point_count_estimate_from_ewkt(dataset, ewkt)
            > app.config['MAX_POINT_QUERY_SIZE']):
        return jsonify(
            wronginput=
            "At this time we don't accept requests larger than {} points. Draw a smaller selection to continue."
            .format(format_big_number(app.config['MAX_POINT_QUERY_SIZE'])))
    # new celery task
    result = new_task.apply_async(
        (left, bottom, right, top, dataset_id, classification))
    # store task parameters in db (reuse the already-built ewkt)
    task = Task(id=result.id, dataset=dataset, classes=classification,
                emailto=email,
                geom=ewkt,
                time_stamp=datetime.datetime.now(),
                ip_address=request.remote_addr)
    db_session.add(task)
    db_session.commit()
    taskurl = url_for('tasks_page', task_id=result.id)
    return jsonify(result=taskurl)
def get_point_count_estimate_from_ewkt(dataset, ewkt):
    """Estimate how many points fall inside the *ewkt* polygon for
    *dataset*.

    Each intersecting tile contributes its point count pro-rated by the
    fraction of its area lying inside the polygon. Returns 0 for an
    empty selection.
    """
    tiles = db_session.query(
        Tile.pointcount
        * func.ST_Area(Tile.geom.ST_Intersection(ewkt))
        / Tile.geom.ST_Area()
    ).filter(Tile.geom.intersects(ewkt), Tile.dataset == dataset)
    # Generator instead of a throwaway list inside sum().
    return sum(row[0] for row in tiles)
def getTaskArea():
    """Return the selection polygon of a task as GeoJSON.

    Reads 'task_id' from the query string. Returns JSON
    {'result': <geojson>} on success, or {'result': None} with a 404
    for a malformed or unknown id.
    """
    task_id = request.args.get('task_id', type=str)
    # Validate before querying (resolves the old "should prob validate
    # this" TODO): id must be a canonical lowercase UUID.
    if task_id is None or not re.match(
            r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-'
            r'[0-9a-f]{4}-[0-9a-f]{12}$', task_id):
        return jsonify(result=None), 404
    try:
        geojson = db_session.query(func.ST_AsGeoJSON(Task.geom)).filter(
            Task.id == task_id).one()[0]
    except NoResultFound:
        # Unknown task id used to surface as a 500; report 404 instead.
        return jsonify(result=None), 404
    return jsonify(result=geojson)
def get_point_count_estimate_from_ewkt(dataset, ewkt):
    """Estimate the number of points inside the *ewkt* polygon for
    *dataset* by pro-rating each intersecting tile's point count by the
    area fraction of the tile that lies within the polygon.
    """
    # Fraction of each tile's footprint covered by the selection.
    covered_fraction = (func.ST_Area(Tile.geom.ST_Intersection(ewkt))
                        / Tile.geom.ST_Area())
    per_tile_estimates = db_session.query(
        Tile.pointcount * covered_fraction
    ).filter(Tile.geom.intersects(ewkt), Tile.dataset == dataset)
    total = 0
    for row in per_tile_estimates:
        total += row[0]
    return total
def getTaskArea():
    """Return the selection polygon of a task as GeoJSON.

    Reads 'task_id' from the query string. Returns JSON
    {'result': <geojson>} on success, or {'result': None} with a 404
    for a malformed or unknown id.
    """
    task_id = request.args.get('task_id', type=str)
    # Validate before querying (resolves the old "should prob validate
    # this" TODO): id must be a canonical lowercase UUID.
    if task_id is None or not re.match(
            r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-'
            r'[0-9a-f]{4}-[0-9a-f]{12}$', task_id):
        return jsonify(result=None), 404
    try:
        geojson = db_session.query(func.ST_AsGeoJSON(
            Task.geom)).filter(Task.id == task_id).one()[0]
    except NoResultFound:
        # Unknown task id used to surface as a 500; report 404 instead.
        return jsonify(result=None), 404
    return jsonify(result=geojson)
def getDownloadArea():
    """Return the union of every tile footprint as one GeoJSON geometry
    (the total area from which data can be downloaded).
    """
    union_geojson = db_session.query(
        func.ST_AsGeoJSON(func.ST_Union(Tile.geom))
    ).one()[0]
    return jsonify(result=union_geojson)