def index(dataset):
    """Return a paged JSON listing of a dataset's runs.

    If a ``source`` query parameter is present, only runs from that
    source are included.
    """
    dataset = get_dataset(dataset)
    query = Run.all(dataset)
    if 'source' in request.args:
        source_name = request.args.get('source')
        query = query.filter(Run.source == source_name)
    return jsonify(Pager(query, dataset=dataset.name))
def update(name):
    """Apply a validated update to the dataset, then return its view."""
    dataset = get_dataset(name)
    require.dataset.update(dataset)
    payload = validate_dataset(request_data())
    dataset.update(payload)
    dataset.touch()
    db.session.commit()
    return view(name)
def view(dataset, id):
    """Return a single run as JSON, including its logged messages."""
    dataset = get_dataset(dataset)
    run = obj_or_404(Run.by_id(dataset, id))
    package = data_manager.package(dataset.name)
    payload = run.to_dict()
    payload['messages'] = list(logger.load(package, run.id))
    return jsonify(payload)
def delete(name):
    """Delete a dataset: drop its fact table and remove the record.

    Responds with HTTP 410 (Gone). Fix: ``jsonify`` takes no ``status``
    keyword — the original ``jsonify({...}, status=410)`` either raises a
    TypeError or silently folds ``status`` into the JSON body (overwriting
    the ``'status'`` key) and returns 200, depending on the Flask version.
    The status code belongs in the view's response tuple instead.
    """
    dataset = get_dataset(name)
    require.dataset.update(dataset)
    dataset.fact_table.drop()
    db.session.delete(dataset)
    db.session.commit()
    return jsonify({'status': 'deleted'}), 410
def view(dataset):
    """Render the HTML overview page for a dataset."""
    dataset = get_dataset(dataset)
    etag_cache_keygen(dataset.updated_at)
    manager_list = list(dataset.managers)
    return render_template('dataset/view.html',
                           dataset=dataset,
                           managers=manager_list,
                           templates=angular_templates(current_app))
def structure(name):
    """Return the dataset's field definitions and sample rows as JSON."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset)
    payload = {
        'fields': dataset.fields,
        'samples': dataset.samples,
    }
    return jsonify(payload)
def serve(dataset, name):
    """Serve a source file, redirecting when it lives at an external URL."""
    dataset = get_dataset(dataset)
    package = data_manager.package(dataset.name)
    source = Source(package, name)
    if source.url is None:
        # No remote location: stream the file contents directly.
        return send_file(source.fh(),
                         mimetype=source.meta.get('mime_type'))
    return redirect(source.url)
def update_model(name):
    """Validate and store a new model for the dataset, then return it."""
    dataset = get_dataset(name)
    require.dataset.update(dataset)
    validated = validate_model(request_data())
    dataset.update_model(validated)
    dataset.touch()
    db.session.commit()
    return model(name)
def submit(dataset):
    """Queue a background load of data from a submitted URL."""
    dataset = get_dataset(dataset)
    require.dataset.update(dataset)
    payload = request_data()
    url = payload.get('url')
    if not url:
        raise BadRequest("You need to submit a URL")
    load_from_url.delay(dataset.name, url)
    return jsonify({'status': 'ok'})
def index(dataset):
    """Return a paged, uncached JSON listing of a dataset's runs.

    A ``source`` query parameter narrows the listing to a single source.
    """
    dataset = get_dataset(dataset)
    disable_cache()
    query = Run.all(dataset)
    if "source" in request.args:
        wanted = request.args.get("source")
        query = query.filter(Run.source == wanted)
    return jsonify(Pager(query, dataset=dataset.name))
def load(dataset, name):
    """Queue a background load from an existing source file."""
    dataset = get_dataset(dataset)
    require.dataset.update(dataset)
    package = data_manager.package(dataset.name)
    source = Source(package, name)
    if source.exists():
        load_from_source.delay(dataset.name, source.name)
        return jsonify({'status': 'ok'})
    raise BadRequest('Source does not exist.')
def update_model(name):
    """Replace the dataset's model after injecting the fact table name."""
    dataset = get_dataset(name)
    require.dataset.update(dataset)
    payload = request_data()
    if isinstance(payload, dict):
        # The model must reference the dataset's own fact table.
        payload['fact_table'] = dataset.fact_table.table_name
    dataset.model = validate_model(payload)
    db.session.commit()
    return model(name)
def index(dataset):
    """Page through a dataset's source files, most recently updated first."""
    dataset = get_dataset(dataset)
    package = data_manager.package(dataset.name)

    def convert(batch):
        # Serialize each source relative to its owning dataset.
        return [source_to_dict(dataset, src) for src in batch]

    sources = sorted(list(package.all(Source)),
                     key=lambda src: src.meta.get('updated_at'),
                     reverse=True)
    pager = Pager(sources, dataset=dataset.name, limit=5,
                  results_converter=convert)
    return jsonify(pager)
def update_managers(name):
    """Replace the dataset's manager list, always keeping the current user."""
    dataset = get_dataset(name)
    require.dataset.update(dataset)
    validated = validate_managers(request_data())
    manager_list = validated['managers']
    if current_user not in manager_list:
        # Prevent users from locking themselves out of the dataset.
        manager_list.append(current_user)
    dataset.managers = manager_list
    db.session.commit()
    return managers(name)
def upload(dataset):
    """Store an uploaded file as a new source and queue a background load."""
    dataset = get_dataset(dataset)
    require.dataset.update(dataset)
    upload_file = request.files.get('file')
    if not upload_file or not upload_file.filename:
        raise BadRequest("You need to upload a file")
    source = extract_fileobj(dataset, fh=upload_file,
                             file_name=upload_file.filename)
    load_from_source.delay(dataset.name, source.name)
    return jsonify(source_to_dict(dataset, source))
def update_managers(name):
    """Replace the dataset's managers, keeping the current user, and touch it."""
    dataset = get_dataset(name)
    require.dataset.update(dataset)
    validated = validate_managers(request_data())
    new_managers = validated['managers']
    if current_user not in new_managers:
        # The acting user must remain a manager of the dataset.
        new_managers.append(current_user)
    dataset.managers = new_managers
    dataset.touch()
    db.session.commit()
    return managers(name)
def index(dataset):
    """Return a paged listing of a dataset's sources, newest first."""
    dataset = get_dataset(dataset)
    package = data_manager.package(dataset.name)
    by_recency = sorted(list(package.all(Source)),
                        key=lambda src: src.meta.get('updated_at'),
                        reverse=True)
    converter = lambda batch: [source_to_dict(dataset, src)
                               for src in batch]
    return jsonify(Pager(by_recency, dataset=dataset.name, limit=5,
                         results_converter=converter))
def upload(dataset):
    """Store an uploaded file as a source and queue a background load."""
    dataset = get_dataset(dataset)
    require.dataset.update(dataset)
    upload_file = request.files.get('file')
    if not upload_file or not upload_file.filename:
        raise BadRequest("You need to upload a file")
    # TODO: consider copying this into a tempfile before upload to make
    # boto happy (it appears to be whacky in it's handling of flask uploads)
    source = extract_fileobj(dataset, fh=upload_file,
                             file_name=upload_file.filename)
    load_from_source.delay(dataset.name, source.name)
    return jsonify(source_to_dict(dataset, source))
def sign(dataset):
    """Create a stub source and return an S3 upload policy for it."""
    dataset = get_dataset(dataset)
    require.dataset.update(dataset)
    payload = request_data()
    if not payload.get('file_name'):
        raise BadRequest("You need to give a file name")
    payload['mime_type'] = (payload.get('mime_type')
                            or 'application/octet-stream')
    # Register an empty placeholder so a key exists for the policy.
    stub = extract_fileobj(dataset, fh=StringIO(),
                           file_name=payload['file_name'],
                           mime_type=payload['mime_type'])
    # The client replaces the stub's content via the signed policy.
    policy = generate_s3_upload_policy(stub, payload['file_name'],
                                       payload['mime_type'])
    return jsonify(policy)
def view(name):
    """Return the dataset itself as JSON, with ETag caching."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset)
    return jsonify(dataset)
def managers(name):
    """Return the dataset's manager list as JSON, with ETag caching."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset)
    payload = {'managers': dataset.managers}
    return jsonify(payload)
def view(dataset, name):
    """Return a single source file's metadata as JSON."""
    dataset = get_dataset(dataset)
    package = data_manager.package(dataset.name)
    src = Source(package, name)
    return jsonify(source_to_dict(dataset, src))
def model(name):
    """Return the dataset's model data as JSON, with ETag caching."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset)
    return jsonify(dataset.model_data)
def update_model(name):
    """Validate the submitted model and store it on the dataset record."""
    dataset = get_dataset(name)
    require.dataset.update(dataset)
    validated = validate_model(request_data())
    # NOTE(review): mutating dataset.data in place may not be detected by
    # SQLAlchemy change tracking unless the column uses a mutable type —
    # confirm the commit actually persists this.
    dataset.data['model'] = validated
    db.session.commit()
    return model(name)
def view(name):
    """Return the dataset as JSON; the ETag reflects its privacy flag."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset, private=dataset.private)
    return jsonify(dataset)
def model(name):
    """Return the dataset's model (or an empty object) as JSON."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset, private=dataset.private)
    payload = dataset.model or {}
    return jsonify(payload)
def managers(name):
    """Return the dataset's managers as JSON; ETag reflects privacy."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset, private=dataset.private)
    payload = {'managers': dataset.managers}
    return jsonify(payload)
def structure(name):
    """Return the dataset's field definitions as JSON; ETag reflects privacy."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset, private=dataset.private)
    payload = {'fields': dataset.fields}
    return jsonify(payload)
def model(name):
    """Return the dataset's model data as JSON; ETag reflects privacy."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset, private=dataset.private)
    return jsonify(dataset.model_data)
def structure(name):
    """Return the dataset's fields as JSON; ETag reflects privacy."""
    dataset = get_dataset(name)
    etag_cache_keygen(dataset, private=dataset.private)
    return jsonify({'fields': dataset.fields})
def app(dataset, *a, **kw):
    """Render the Angular single-page shell for a dataset.

    Extra positional/keyword route arguments are accepted and ignored so
    one handler can back multiple client-side routes.
    """
    dataset = get_dataset(dataset)
    etag_cache_keygen(dataset.updated_at)
    return render_template('dataset/angular.html',
                           dataset=dataset,
                           templates=angular_templates(current_app))