def discretize(dataset_id, table_name):
    """Discretize a numeric column using the strategy given in the query string.

    Supported ``discretization`` values: ``eq-width`` and ``eq-freq`` (both
    read ``num-intervals``) and ``manual`` (reads a comma-separated
    ``intervals`` list). Flashes a status message and returns a JSON
    success/error payload. Fix: the flash messages misspelled "discretized".
    """
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    column_name = request.args.get('col-name')
    discretization = request.args.get('discretization')
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        if discretization == 'eq-width':
            num_intervals = int(request.args.get('num-intervals'))
            numerical_transformer.equal_width_interval(dataset_id, table_name, column_name, num_intervals)
        elif discretization == 'eq-freq':
            num_intervals = int(request.args.get('num-intervals'))
            numerical_transformer.equal_freq_interval(dataset_id, table_name, column_name, num_intervals)
        elif discretization == 'manual':
            intervals = [int(n) for n in request.args.get('intervals').strip().split(',')]
            numerical_transformer.manual_interval(dataset_id, table_name, column_name, intervals)
        else:
            # Unknown strategy: report failure without touching the data.
            flash(u"Data couldn't be discretized.", 'danger')
            return jsonify({'error': True}), 400
    except ValueError:
        # Raised when num-intervals / intervals are missing or non-numeric.
        flash(u"Data couldn't be discretized.", 'danger')
        return jsonify({'error': True}), 400
    flash(u"Data has been discretized.", 'success')
    return jsonify({'success': True}), 200
def get_table(dataset_id, table_name):
    """Serve one page of table rows as a DataTables-style JSON payload.

    Reads the DataTables query parameters (``start``, ``length``,
    ``order[0][...]``, ``search[value]``, ``draw``) and returns ``draw``,
    ``recordsTotal``, ``recordsFiltered`` and ``data``.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
    start = request.args.get('start')
    length = request.args.get('length')
    # DataTables sends the ordered column as an index; map it to its name.
    order_column_idx = int(request.args.get('order[0][column]'))
    order_column_name = request.args.get(
        'columns[{}][data]'.format(order_column_idx))
    order_direction = request.args.get('order[0][dir]')
    ordering = (order_column_name, order_direction)
    search = request.args.get('search[value]')
    table = data_loader.get_table(dataset_id, table_name, offset=start, limit=length, ordering=ordering, search=search)
    # Make proper data dict out of table rows
    data = list()
    for r_ix in range(len(table.rows)):
        r = dict()
        for c_ix in range(len(table.columns)):
            r[table.columns[c_ix].name] = table.rows[r_ix][c_ix]
        data.append(r)
    # NOTE(review): recordsFiltered mirrors recordsTotal even when a search
    # filter is applied — confirm this is intended.
    return jsonify(draw=int(request.args.get('draw')), recordsTotal=table.total_size,
                   recordsFiltered=table.total_size, data=data)  # table.rows
def get_join_column_names(dataset_id, table_name):
    """Expose the table's column names as a JSON list."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    names = data_loader.get_column_names(dataset_id, table_name)
    return jsonify(names)
def join_tables(dataset_id):
    """Join several tables of the dataset into a new table.

    The form carries the new table's name and description plus one
    ``join<N>`` entry per pair: [table1, column1, operator, table2, column2].
    Redirects back to the dataset view, flashing success or failure.
    Fixes: removed the dead ``else: continue``, dropped the unused exception
    binding, corrected the "cointained" typo in the internal error message.
    """
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    # Extract data from form
    join_pairs = list()
    try:
        active_user_handler.make_user_active_in_dataset(dataset_id, current_user.username)
        # Strip double quotes so the name stays safe in quoted SQL identifiers.
        name = request.form.get('table-name').replace('"', '')
        if not name:
            raise Exception('Joined table name contained " only')
        meta = request.form.get('table-meta')
        f = request.form
        # Every form key starting with "join" describes one join pair.
        for key in f.keys():
            if key.startswith("join"):
                join_pair_row = f.getlist(key)
                t1 = join_pair_row[0]
                t1_column = join_pair_row[1]
                relation_operator = join_pair_row[2]
                t2 = join_pair_row[3]
                t2_column = join_pair_row[4]
                join_pairs.append(TableJoinPair(table1_name=t1, table2_name=t2,
                                                table1_column=t1_column, table2_column=t2_column,
                                                relation_operator=relation_operator))
        table_joiner.join_multiple_tables(dataset_id, name, meta, join_pairs)
        flash(u"Join of tables was successful.", 'success')
    except Exception:
        flash(u"Join of tables was unsuccessful.", 'danger')
    return redirect(url_for('data_service.get_dataset', dataset_id=dataset_id))
def find_and_replace(dataset_id, table_name):
    """Run find-and-replace on a column, optionally driven by a regex."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        column_name = request.args.get('col-name')
        replacement_function = request.args.get('replacement-function')
        replacement_value = request.args.get('replacement-value')
        if replacement_function == "regex":
            pattern = request.args.get('replacement-regex')
            data_transformer.find_and_replace_by_regex(dataset_id, table_name, column_name,
                                                       pattern, replacement_value)
        else:
            old_value = request.args.get('value-to-be-replaced')
            data_transformer.find_and_replace(dataset_id, table_name, column_name, old_value,
                                              replacement_value, replacement_function)
        flash(u"Find and replace was successful.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Find and replace was unsuccessful.", 'danger')
        return jsonify({'error': True}), 400
def get_table(dataset_id, table_name):
    """Return one page of table rows as DataTables-compatible JSON.

    ``start``/``length`` page the result, ``order[0][...]`` select the sort
    column and direction, ``search[value]`` filters, ``draw`` is echoed back.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
    start = request.args.get('start')
    length = request.args.get('length')
    # The sort column arrives as an index into the client's column list.
    order_column_idx = int(request.args.get('order[0][column]'))
    order_column_name = request.args.get('columns[{}][data]'.format(order_column_idx))
    order_direction = request.args.get('order[0][dir]')
    ordering = (order_column_name, order_direction)
    search = request.args.get('search[value]')
    table = data_loader.get_table(dataset_id, table_name, offset=start, limit=length, ordering=ordering, search=search)
    # Make proper data dict out of table rows
    data = list()
    for r_ix in range(len(table.rows)):
        r = dict()
        for c_ix in range(len(table.columns)):
            r[table.columns[c_ix].name] = table.rows[r_ix][c_ix]
        data.append(r)
    return jsonify(draw=int(request.args.get('draw')), recordsTotal=table.total_size,
                   recordsFiltered=table.total_size, data=data)  # table.rows
def export_table(dataset_id, table_name):
    """Export a table as CSV and send it to the client as an attachment.

    Separator, quote and empty-value characters come from the query string.
    Fix: build the output path with ``os.path.join`` instead of manual
    ``"/"`` concatenation (platform-safe, matches the rest of the file).
    """
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    # Maybe later we might add other types, but for now this is hardcoded to export as CSV
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        filename = table_name + ".csv"
        path = os.path.join(UPLOAD_FOLDER, filename)
        separator = request.args.get('separator')
        quote_char = request.args.get('quote_char')
        empty_char = request.args.get('empty_char')
        data_loader.export_table(path, dataset_id, table_name, separator=separator,
                                 quote_char=quote_char, empty_char=empty_char)
        flash(u"Data has been exported.", 'success')
        return send_from_directory(UPLOAD_FOLDER, filename, as_attachment=True)
    except Exception:
        flash(u"Data couldn't be exported.", 'danger')
        return jsonify({'error': True}), 400
def add_table(dataset_id):
    """Upload a file (zip / csv / SQL dump) and load it into the dataset.

    Always re-renders the dataset view; failures are reported through flash
    messages rather than error responses. CSV uploads append to an existing
    table of the same name instead of creating a new one.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    if 'file' not in request.files:
        return get_dataset(dataset_id)
    file = request.files['file']
    # If the user doesn't select a file, the browser
    # submits an empty part without filename
    if file.filename == '':
        return get_dataset(dataset_id)
    # TEMP solution: create UPLOAD_FOLDER if it doesn't exists to prevent 'file not found' error.
    # This should probably be done in some setup function and not every time this method is called
    if not os.path.exists(UPLOAD_FOLDER):
        os.makedirs(UPLOAD_FOLDER)
    if file and allowed_file(file.filename):
        filename = secure_filename(file.filename)
        path = os.path.join(UPLOAD_FOLDER, filename)
        try:
            file.save(path)
        except Exception as e:
            app.logger.error("[ERROR] Failed to upload file '" + file.filename + "'")
            app.logger.exception(e)
            file.close()
            os.remove(path)
            flash(u"Failed to upload file.", 'danger')
            return get_dataset(dataset_id)
        current_user.active_schema = dataset_id
        try:
            type_deduction = (request.form.get('ds-type-deduction') is not None)  # Unchecked returns None
            # Fall back to the filename stem / a default description.
            table_name = request.form.get('ds-table-name') or filename.rsplit('.')[0]
            table_desc = request.form.get('ds-table-desc') or 'Default description'
            # Strip double quotes so the name is safe in quoted SQL identifiers.
            table_name = table_name.replace('"', '')
            if table_name.isspace():
                table_name = filename.rsplit('.')[0]
            if filename[-3:] == "zip":
                data_loader.process_zip(path, dataset_id, type_deduction=type_deduction)
            elif filename[-3:] == "csv":
                create_new = not data_loader.table_exists(table_name, dataset_id)
                if create_new:
                    data_loader.process_csv(path, dataset_id, table_name, table_description=table_desc,
                                            type_deduction=type_deduction)
                else:
                    # Existing table of the same name: append instead of create.
                    data_loader.process_csv(path, dataset_id, table_name, table_description=table_desc,
                                            append=True, type_deduction=type_deduction)
            else:
                data_loader.process_dump(path, dataset_id, table_name=table_name, table_description=table_desc)
            flash(u"Data has been imported.", 'success')
        except Exception as e:
            app.logger.error("[ERROR] Failed to process file '" + filename + "'")
            app.logger.exception(e)
            flash(u"Data couldn't be imported.", 'danger')
            return get_dataset(dataset_id)
        # Uploaded file is only a staging copy; remove it after import.
        file.close()
        os.remove(path)
    return get_dataset(dataset_id)
def delete_dataset(dataset_id):
    """Delete the whole dataset, then redirect to the datasets overview."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        data_loader.delete_dataset(dataset_id)
        flash(u"Dataset has been deleted.", 'success')
    except Exception:
        flash(u"Something went wrong while deleting your dataset.", 'danger')
    return redirect(url_for('data_service.get_datasets'), code=303)
def get_active_users(dataset_id, table_name):
    """List the users currently active in this table as JSON."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        username = current_user.username
        # Register the caller as active before reporting who else is here.
        active_user_handler.make_user_active_in_table(dataset_id, table_name, username)
        users = active_user_handler.get_active_users_in_table(dataset_id, table_name, username)
        return jsonify(data=users)
    except Exception:
        return jsonify({'error': True}), 400
def delete_backup(dataset_id, table_name, timestamp):
    """Remove the backup identified by *timestamp* for this table."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        username = current_user.username
        active_user_handler.make_user_active_in_table(dataset_id, table_name, username)
        data_loader.delete_backup(dataset_id, table_name, timestamp)
        return jsonify({'success': True}), 200
    except Exception:
        return jsonify({'error': True}), 400
def delete_backup(dataset_id, table_name, timestamp):
    """Delete the table backup identified by *timestamp*; JSON success/error."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        data_loader.delete_backup(dataset_id, table_name, timestamp)
        return jsonify({'success': True}), 200
    except Exception:
        return jsonify({'error': True}), 400
def delete_table(dataset_id, table_name):
    """Drop a table from the dataset and return to the dataset view."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        data_loader.delete_table(table_name, dataset_id)
        # The table is gone, so move the caller's activity up to dataset level.
        active_user_handler.make_user_active_in_dataset(dataset_id, current_user.username)
        flash(u"Table has been removed.", 'success')
    except Exception:
        flash(u"Table couldn't be removed.", 'danger')
    return redirect(url_for('data_service.get_dataset', dataset_id=dataset_id), code=303)
def get_backup_info(dataset_id, table_name, timestamp):
    """Return the note stored with a backup (empty string on failure)."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        if timestamp == "DEFAULT":
            # Placeholder entry of the backup dropdown — nothing to look up.
            return "Select backup to display note..."
        return data_loader.get_backup_info(dataset_id, table_name, timestamp)
    except Exception:
        return ""
def get_active_users(dataset_id, table_name):
    """Return the users currently active in this table as JSON."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        # Mark the caller active before reporting who else is here.
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        active_users = active_user_handler.get_active_users_in_table(
            dataset_id, table_name, current_user.username)
        return jsonify(data=active_users)
    except Exception:
        return jsonify({'error': True}), 400
def undo_action(dataset_id, table_name, action_id):
    """Revert a single action from the table's history."""
    if not data_loader.has_access(current_user.username, dataset_id):
        return abort(403)
    active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
    try:
        _history.undo_action(dataset_id, table_name, action_id)
        flash(u"Action was undone", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Action could not be undone", 'danger')
        return jsonify({'error': True}), 400
def undo_action(dataset_id, table_name, action_id):
    """Undo one action from the table's history; JSON success/error."""
    if not data_loader.has_access(current_user.username, dataset_id):
        return abort(403)
    active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
    try:
        _history.undo_action(dataset_id, table_name, action_id)
        flash(u"Action was undone", 'success')
        return jsonify({'success': True}), 200
    except Exception as e:
        flash(u"Action could not be undone", 'danger')
        return jsonify({'error': True}), 400
def delete_row(dataset_id, table_name):
    """Delete the rows whose ids are encoded in the query-string keys."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        # Keys are of the form "<prefix>-<id>"; collect the part after the dash.
        row_ids = []
        for key in request.args:
            row_ids.append(key.split('-')[1])
        data_loader.delete_row(dataset_id, table_name, row_ids)
        flash(u"Rows have been deleted.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Rows couldn't be deleted.", 'danger')
        return jsonify({'error': True}), 400
def normalize(dataset_id, table_name):
    """Normalize the numeric column named in the query string."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        username = current_user.username
        active_user_handler.make_user_active_in_table(dataset_id, table_name, username)
        numerical_transformer.normalize(dataset_id, table_name, request.args.get('col-name'))
        flash(u"Data has been normalized.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Data couldn't be normalized.", 'danger')
        return jsonify({'error': True}), 400
def get_backup_info(dataset_id, table_name, timestamp):
    """Return the note stored with a backup, or "" when the lookup fails."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        if timestamp == "DEFAULT":
            # Placeholder option of the backup dropdown.
            return "Select backup to display note..."
        note = data_loader.get_backup_info(dataset_id, table_name, timestamp)
        return note
    except Exception:
        return ""
def one_hot_encode(dataset_id, table_name):
    """One-hot encode the column named in the query string."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        one_hot_encoder.encode(dataset_id, table_name, request.args.get('col-name'))
        flash(u"One hot encoding was successful.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"One hot encoding was unsuccessful.", 'danger')
        return jsonify({'error': True}), 400
def delete_column(dataset_id, table_name):
    """Drop the column named in the query string from the table."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        target = request.args.get('col-name')
        data_loader.delete_column(dataset_id, table_name, target)
        flash(u"Column has been deleted.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Column couldn't be deleted.", 'danger')
        return jsonify({'error': True}), 400
def one_hot_encode(dataset_id, table_name):
    """One-hot encode the column named in the query string; JSON result."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        column_name = request.args.get('col-name')
        one_hot_encoder.encode(dataset_id, table_name, column_name)
        flash(u"One hot encoding was successful.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"One hot encoding was unsuccessful.", 'danger')
        return jsonify({'error': True}), 400
def transform_date_or_time(dataset_id, table_name):
    """Apply the named date/time transformation to a column."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        target_column = request.args.get('col-name')
        operation = request.args.get('operation-name')
        date_time_transformer.transform(dataset_id, table_name, target_column, operation)
        flash(u"Date/Time transformation was successful.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Date/Time transformation was unsuccessful.", 'danger')
        return jsonify({'error': True}), 400
def update_dataset_metadata():
    """Update a dataset's name and description from query-string parameters.

    Fix: the access check used to sit inside the ``try`` block, where the
    ``HTTPException`` raised by ``abort(403)`` was swallowed by the broad
    ``except Exception`` and turned into a flashed 400. The check (and the
    harmless argument reads) now run before the try so a 403 propagates.
    """
    dataset_id = request.args.get('ds-id')
    new_name = request.args.get('ds-name')
    new_desc = request.args.get('ds-desc')
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        data_loader.update_dataset_metadata(dataset_id, new_name, new_desc)
        flash(u"Metadata has been updated.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Metadata couldn't be updated.", 'danger')
        return jsonify({'error': True}), 400
def normalize(dataset_id, table_name):
    """Normalize the numeric column named in the query string; JSON result."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        column_name = request.args.get('col-name')
        numerical_transformer.normalize(dataset_id, table_name, column_name)
        flash(u"Data has been normalized.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Data couldn't be normalized.", 'danger')
        return jsonify({'error': True}), 400
def delete_column(dataset_id, table_name):
    """Drop the column named in the query string; JSON success/error."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        column_name = request.args.get('col-name')
        data_loader.delete_column(dataset_id, table_name, column_name)
        flash(u"Column has been deleted.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Column couldn't be deleted.", 'danger')
        return jsonify({'error': True}), 400
def delete_row(dataset_id, table_name):
    """Delete rows; ids come from the query-string keys ("<prefix>-<id>")."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        # Take the part after the first dash of every query key as a row id.
        row_ids = [key.split('-')[1] for key in request.args]
        data_loader.delete_row(dataset_id, table_name, row_ids)
        flash(u"Rows have been deleted.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Rows couldn't be deleted.", 'danger')
        return jsonify({'error': True}), 400
def update_dataset_metadata():
    """Update a dataset's name and description from query-string parameters.

    Fix: ``abort(403)`` raises an ``HTTPException`` (a subclass of
    ``Exception``), so with the access check inside the ``try`` the broad
    handler converted the intended 403 into a flashed 400. The check now
    runs before the try block.
    """
    dataset_id = request.args.get('ds-id')
    new_name = request.args.get('ds-name')
    new_desc = request.args.get('ds-desc')
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        data_loader.update_dataset_metadata(dataset_id, new_name, new_desc)
        flash(u"Metadata has been updated.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Metadata couldn't be updated.", 'danger')
        return jsonify({'error': True}), 400
def outliers(dataset_id, table_name):
    """Remove outlier values beyond a threshold from a numeric column."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        column_name = request.args.get('col-name')
        # Whether the "less-than" option was selected in the form.
        less_than = request.args.get('option') == 'less-than'
        threshold = float(request.args.get('value'))
        numerical_transformer.remove_outlier(dataset_id, table_name, column_name, threshold, less_than)
    except ValueError:
        # Raised when 'value' is missing or not numeric.
        flash(u"Outliers couldn't be removed.", 'danger')
        return jsonify({'error': True}), 400
    flash(u"Outliers have been removed.", 'success')
    return jsonify({'success': True}), 200
def restore_backup(dataset_id, table_name):
    """Restore the table from the backup selected in the query string.

    Fix: the success flash misspelled "Successfully".
    """
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        backup_ts = request.args.get('backup-timestamp')
        # "DEFAULT" is the placeholder option of the backup dropdown.
        if backup_ts == "DEFAULT":
            return jsonify({'error': True}), 400
        data_loader.restore_backup(dataset_id, table_name, backup_ts)
        flash(u"Successfully restored backup.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Failed to restore backup.", 'danger')
        return jsonify({'error': True}), 400
def rename_column(dataset_id, table_name):
    """Rename a column; rejects empty or unchanged names."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        old_name = request.args.get('col-name')
        # Double quotes are stripped so the name is safe in quoted identifiers.
        new_name = request.args.get('new-name').replace('"', '')
        if not len(new_name) or new_name == old_name:
            raise Exception('Column name is empty')
        data_loader.rename_column(dataset_id, table_name, old_name, new_name)
        flash(u"Column has been renamed.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Column couldn't be renamed.", 'danger')
        return jsonify({'error': True}), 400
def chart(dataset_id, table_name):
    """Return chart data for a column — numerical or categorical by type."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        column_name = request.args.get('col-name')
        column_type = request.args.get('col-type')
        numeric_types = ['real', 'double', 'integer', 'timestamp']
        if column_type in numeric_types:
            return jsonify(numerical_transformer.chart_data_numerical(dataset_id, table_name, column_name))
        return jsonify(numerical_transformer.chart_data_categorical(dataset_id, table_name, column_name))
    except Exception:
        flash(u"Charts couldn't be produced.", 'danger')
        return jsonify({'error': True}), 400
def transform_date_or_time(dataset_id, table_name):
    """Apply the named date/time transformation to a column; JSON result."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        column_name = request.args.get('col-name')
        operation_name = request.args.get('operation-name')
        date_time_transformer.transform(dataset_id, table_name, column_name, operation_name)
        flash(u"Date/Time transformation was successful.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Date/Time transformation was unsuccessful.", 'danger')
        return jsonify({'error': True}), 400
def restore_backup(dataset_id, table_name):
    """Restore the table from the selected backup timestamp.

    Fix: the success flash misspelled "Successfully".
    """
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        backup_ts = request.args.get('backup-timestamp')
        # "DEFAULT" is the placeholder option of the backup dropdown.
        if backup_ts == "DEFAULT":
            return jsonify({'error': True}), 400
        data_loader.restore_backup(dataset_id, table_name, backup_ts)
        flash(u"Successfully restored backup.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Failed to restore backup.", 'danger')
        return jsonify({'error': True}), 400
def rename_column(dataset_id, table_name):
    """Rename a column; empty or unchanged names are rejected."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        to_rename = request.args.get('col-name')
        # Strip double quotes so the name is safe in quoted identifiers.
        new_name = request.args.get('new-name').replace('"', '')
        if not len(new_name) or new_name == to_rename:
            raise Exception('Column name is empty')
        data_loader.rename_column(dataset_id, table_name, to_rename, new_name)
        flash(u"Column has been renamed.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Column couldn't be renamed.", 'danger')
        return jsonify({'error': True}), 400
def outliers(dataset_id, table_name):
    """Remove outlier values beyond a threshold from a numeric column."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        column_name = request.args.get('col-name')
        # True when the "less-than" option was selected in the form.
        option = request.args.get('option') == 'less-than'
        value = float(request.args.get('value'))
        numerical_transformer.remove_outlier(dataset_id, table_name, column_name, value, option)
    except ValueError:
        # Raised when 'value' is missing or not numeric.
        flash(u"Outliers couldn't be removed.", 'danger')
        return jsonify({'error': True}), 400
    flash(u"Outliers have been removed.", 'success')
    return jsonify({'success': True}), 200
def update_table_metadata(dataset_id):
    """Rename a table and/or update its description."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_dataset(dataset_id, current_user.username)
        old_table_name = request.args.get('t-old-name')
        # Strip double quotes so the new name stays safe in quoted identifiers.
        new_table_name = request.args.get('t-name').replace('"', '')
        if not len(new_table_name):
            raise Exception('Could not rename table')
        description = request.args.get('t-desc')
        data_loader.update_table_metadata(dataset_id, old_table_name, new_table_name, description)
        flash(u"Metadata has been updated.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Metadata couldn't be updated.", 'danger')
        return jsonify({'error': True}), 400
def show_dedup_data_alg(dataset_id, table_name):
    """Render the next group of candidate duplicate rows for review."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    # Deduplication results live in a companion "_dedup_<name>_grouped" table.
    dedup_table_name = "_dedup_" + table_name + "_grouped"
    dedup_table_exists = data_loader.table_exists(dedup_table_name, dataset_id)
    if not dedup_table_exists:
        flash(u"Duplicate data does not exist.", 'warning')
        return redirect(url_for('data_service.get_table', dataset_id=dataset_id, table_name=table_name))
    try:
        group_id = data_deduplicator.get_next_group_id(dataset_id, table_name)
        table = data_deduplicator.get_cluster(dataset_id, table_name, group_id)
        title = "Duplicate data for " + table_name + ": Group " + str(group_id)
        return render_template('data_service/dedup-cluster-view.html', table=table, title=title)
    except Exception:
        flash(u"Duplicate data couldn't be shown.", 'danger')
        return redirect(url_for('data_service.get_table', dataset_id=dataset_id, table_name=table_name), code=303)
def show_raw_data(dataset_id, table_name):
    """Serve one page of the backing "_raw_" table as DataTables JSON."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
    start = request.args.get('start')
    length = request.args.get('length')
    order_column = int(request.args.get('order[0][column]'))
    order_direction = request.args.get('order[0][dir]')
    raw_table_name = "_raw_" + table_name
    # Translate the DataTables column index into a column name for ordering.
    ordering = (data_loader.get_column_names(dataset_id, raw_table_name)[order_column], order_direction)
    table = data_loader.get_table(dataset_id, raw_table_name, offset=start, limit=length, ordering=ordering)
    # NOTE(review): the whole table is fetched a second time only to count its
    # rows — table.total_size (used elsewhere in this file) may avoid that.
    _table = data_loader.get_table(dataset_id, raw_table_name)
    return jsonify(draw=int(request.args.get('draw')), recordsTotal=len(_table.rows),
                   recordsFiltered=len(_table.rows), data=table.rows)
def show_raw_data(dataset_id, table_name):
    """Render the full raw (pre-processing) copy of a table, if one exists."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    # The raw copy is kept in a companion "_raw_<name>" table.
    raw_table_name = "_raw_" + table_name
    raw_table_exists = data_loader.table_exists(raw_table_name, dataset_id)
    if not raw_table_exists:
        flash(u"Raw data does not exist.", 'warning')
        return redirect(url_for('data_service.get_table', dataset_id=dataset_id, table_name=table_name))
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        table = data_loader.get_table(dataset_id, raw_table_name)
        title = "Raw data for " + table_name
        return render_template('data_service/raw-table-view.html', table=table, title=title)
    except Exception:
        flash(u"Raw data couldn't be shown.", 'danger')
        return redirect(url_for('data_service.get_table', dataset_id=dataset_id, table_name=table_name), code=303)
def impute_missing_data(dataset_id, table_name):
    """Fill missing values in a column using the selected imputation function."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        column_name = request.args.get('col-name')
        function = request.args.get('function')
        if function == "CUSTOM":
            # A user-supplied constant is used as the fill value.
            custom_value = request.args.get('custom-value')
            data_transformer.impute_missing_data(dataset_id, table_name, column_name, function, custom_value)
        else:
            data_transformer.impute_missing_data(dataset_id, table_name, column_name, function)
        flash(u"Missing data has been filled.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Couldn't fill missing data.", 'danger')
        return jsonify({'error': True}), 400
def get_history(dataset_id, table_name):
    """Serve one page of the table's action history as DataTables JSON."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
    start = request.args.get('start')
    length = request.args.get('length')
    search = request.args.get('search[value]')
    order_column = int(request.args.get('order[0][column]'))
    order_direction = request.args.get('order[0][dir]')
    # The history view exposes exactly two sortable columns.
    ordering = (['date', 'action_desc'][order_column], order_direction)
    rows = _history.get_actions(dataset_id, table_name, offset=start, limit=length, ordering=ordering, search=search)
    # Second, unpaged fetch is used only to report the total row count.
    _rows = _history.get_actions(dataset_id, table_name)
    return jsonify(draw=int(request.args.get('draw')), recordsTotal=len(_rows), recordsFiltered=len(_rows), data=rows)
def create_backup(dataset_id, table_name):
    """Create a backup of the table, annotated with an optional note.

    Fixes: when the backend raised "Backup limit reached." the handler
    flashed BOTH the limit message and the generic failure message (missing
    ``else``); now exactly one failure message is flashed. Also corrected
    the "Succesfully" typo in the success flash.
    """
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        if not data_loader.backup_available(dataset_id, table_name):
            flash(u"You have reached the limit amount of backups. You must remove a backup to create a new one.",
                  'danger')
            return jsonify({'error': True}), 400
        note = request.args.get('backup-note')
        data_loader.make_backup(dataset_id, table_name, note)
        flash(u"Successfully created backup.", 'success')
        return jsonify({'success': True}), 200
    except Exception as e:
        if str(e) == "Backup limit reached.":
            flash(u"Can't create backup, limit reached.", 'danger')
        else:
            flash(u"Failed to create backup.", 'danger')
        return jsonify({'error': True}), 400
def update_table_metadata(dataset_id):
    """Rename a table and/or update its description; JSON success/error."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_dataset(
            dataset_id, current_user.username)
        old_table_name = request.args.get('t-old-name')
        # Strip double quotes so the new name stays safe in quoted identifiers.
        new_table_name = request.args.get('t-name').replace('"', '')
        if not len(new_table_name):
            raise Exception('Could not rename table')
        new_desc = request.args.get('t-desc')
        data_loader.update_table_metadata(dataset_id, old_table_name, new_table_name, new_desc)
        flash(u"Metadata has been updated.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Metadata couldn't be updated.", 'danger')
        return jsonify({'error': True}), 400
def add_row(dataset_id, table_name):
    """Insert a new row built from 'value-col-<name>' query parameters."""
    if data_loader.has_access(current_user.username, dataset_id) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name, current_user.username)
        values = {}
        columns = []
        for key in request.args:
            if not key.startswith('value-col'):
                continue
            col_name = key.split('-')[2]  # Key is of the form "value-col-[name]"
            columns.append(col_name)
            values[col_name] = request.args.get(key)
        data_loader.insert_row(table_name, dataset_id, columns, values)
        flash(u"Rows have been added.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Rows couldn't be added.", 'danger')
        return jsonify({'error': True}), 400
def get_dataset(dataset_id):
    """Render the dataset overview page (tables, columns, access list)."""
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    dataset = data_loader.get_dataset(dataset_id, current_user.username)
    tables = data_loader.get_tables(dataset_id, current_user.username)
    users_with_access = data_loader.get_dataset_access(dataset_id).rows
    # Moderators of the dataset get extra permissions in the template.
    access_permission = current_user.username in dataset.moderators
    current_user.active_schema = dataset_id
    columns = list()
    if len(tables) != 0:
        # Column names of the first table — presumably used to populate a
        # column picker in the template; confirm against dataset-view.html.
        columns = data_loader.get_column_names(dataset_id, tables[0].name)
    active_user_handler.make_user_active_in_dataset(dataset_id, current_user.username)
    return render_template('data_service/dataset-view.html', ds=dataset, tables=tables, columns=columns,
                           access_permission=access_permission, users_with_access=users_with_access)
def get_table(dataset_id, table_name):
    """Render the table view page with statistics, transformations and backups.

    On any failure, flashes an error and redirects (303) back to the
    dataset view. Returns 403 when the user has no access.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        table = data_loader.get_table(dataset_id, table_name)
        statistics = data_loader.get_statistics_for_all_columns(
            dataset_id, table_name, table.columns)
        time_date_transformations = date_time_transformer.get_transformations()
        backups = data_loader.get_backups(dataset_id, table_name)
        # The untouched original data lives in a companion "_raw_" table.
        raw_table_name = "_raw_" + table_name
        raw_table_exists = data_loader.table_exists(raw_table_name, dataset_id)
        current_user.active_schema = dataset_id
        active_user_handler.make_user_active_in_table(dataset_id, table_name,
                                                      current_user.username)
        return render_template(
            'data_service/table-view.html',
            table=table,
            time_date_transformations=time_date_transformations,
            statistics=statistics,
            raw_table_exists=raw_table_exists,
            backups=backups)
    except Exception:
        flash(u"Table couldn't be shown.", 'danger')
        return redirect(url_for('data_service.get_dataset',
                                dataset_id=dataset_id),
                        code=303)
def add_row(dataset_id, table_name):
    """Insert a single row built from 'value-col-<name>' query arguments.

    Responds with a JSON success/error payload and flashes a message;
    returns 403 when the current user has no access to the dataset.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name,
                                                      current_user.username)
        values = dict()
        columns = list()
        for key in request.args:
            if key.startswith('value-col'):
                # Key is of the form "value-col-[name]". Use maxsplit so
                # column names that themselves contain '-' are not truncated
                # (plain split('-')[2] would keep only the first segment).
                col_name = key.split('-', 2)[2]
                values[col_name] = request.args.get(key)
                columns.append(col_name)
        data_loader.insert_row(table_name, dataset_id, columns, values)
        flash(u"Rows have been added.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Rows couldn't be added.", 'danger')
        return jsonify({'error': True}), 400
def create_backup(dataset_id, table_name):
    """Create a backup of *table_name* with an optional note.

    Refuses (400) when the backup limit has been reached; otherwise makes
    the backup and responds with a JSON success/error payload. Returns
    403 when the current user has no access to the dataset.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name,
                                                      current_user.username)
        if not data_loader.backup_available(dataset_id, table_name):
            flash(
                u"You have reached the limit amount of backups. You must remove a backup to create a new one.",
                'danger')
            return jsonify({'error': True}), 400
        note = request.args.get('backup-note')
        data_loader.make_backup(dataset_id, table_name, note)
        flash(u"Succesfully created backup.", 'success')
        return jsonify({'success': True}), 200
    except Exception as e:
        # BUG FIX: the generic failure flash previously fired in addition to
        # the limit-reached flash; the 'else' makes the two mutually exclusive.
        if str(e) == "Backup limit reached.":
            flash(u"Can't create backup, limit reached.", 'danger')
        else:
            flash(u"Failed to create backup.", 'danger')
        return jsonify({'error': True}), 400
def chart(dataset_id, table_name):
    """Return JSON chart data for the column given in the query args.

    Numeric/timestamp columns get numerical chart data; everything else
    is treated as categorical. Returns 403 when the user has no access,
    400 with an error payload when chart production fails.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name,
                                                      current_user.username)
        column_name = request.args.get('col-name')
        column_type = request.args.get('col-type')
        numeric_types = ('real', 'double', 'integer', 'timestamp')
        if column_type in numeric_types:
            payload = numerical_transformer.chart_data_numerical(
                dataset_id, table_name, column_name)
        else:
            payload = numerical_transformer.chart_data_categorical(
                dataset_id, table_name, column_name)
        return jsonify(payload)
    except Exception:
        flash(u"Charts couldn't be produced.", 'danger')
        return jsonify({'error': True}), 400
def impute_missing_data(dataset_id, table_name):
    """Fill missing values in a column using the requested function.

    When the function is "CUSTOM", the 'custom-value' query argument is
    forwarded as the fill value. Responds with a JSON success/error
    payload; 403 when the user has no access.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    try:
        active_user_handler.make_user_active_in_table(dataset_id, table_name,
                                                      current_user.username)
        column_name = request.args.get('col-name')
        function = request.args.get('function')
        # Only the CUSTOM strategy carries an extra user-supplied value.
        extra_args = []
        if function == "CUSTOM":
            extra_args.append(request.args.get('custom-value'))
        data_transformer.impute_missing_data(dataset_id, table_name,
                                             column_name, function,
                                             *extra_args)
        flash(u"Missing data has been filled.", 'success')
        return jsonify({'success': True}), 200
    except Exception:
        flash(u"Couldn't fill missing data.", 'danger')
        return jsonify({'error': True}), 400
def show_raw_data(dataset_id, table_name):
    """Serve a DataTables-style JSON page of the companion '_raw_' table.

    Reads DataTables paging/ordering query arguments ('start', 'length',
    'order[0][column]', 'order[0][dir]', 'draw') and returns the rows of
    the raw table page plus total row counts. Returns 403 when the user
    has no access.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    active_user_handler.make_user_active_in_table(dataset_id, table_name,
                                                  current_user.username)
    offset = request.args.get('start')
    limit = request.args.get('length')
    sort_col_idx = int(request.args.get('order[0][column]'))
    sort_dir = request.args.get('order[0][dir]')
    raw_table_name = "_raw_" + table_name
    # Map the DataTables column index back to an actual column name.
    column_names = data_loader.get_column_names(dataset_id, raw_table_name)
    ordering = (column_names[sort_col_idx], sort_dir)
    page = data_loader.get_table(dataset_id, raw_table_name, offset=offset,
                                 limit=limit, ordering=ordering)
    # Full fetch used only for the total row count.
    full = data_loader.get_table(dataset_id, raw_table_name)
    total = len(full.rows)
    return jsonify(draw=int(request.args.get('draw')),
                   recordsTotal=total,
                   recordsFiltered=total,
                   data=page.rows)
def get_history(dataset_id, table_name):
    """Serve a DataTables-style JSON page of the table's action history.

    Reads DataTables paging/ordering/search query arguments and orders by
    either 'date' or 'action_desc' depending on the requested column
    index. Returns 403 when the user has no access.
    """
    if (data_loader.has_access(current_user.username, dataset_id)) is False:
        return abort(403)
    active_user_handler.make_user_active_in_table(dataset_id, table_name,
                                                  current_user.username)
    offset = request.args.get('start')
    limit = request.args.get('length')
    search_term = request.args.get('search[value]')
    sort_col_idx = int(request.args.get('order[0][column]'))
    sort_dir = request.args.get('order[0][dir]')
    # The history grid exposes exactly two sortable columns.
    sortable = ['date', 'action_desc']
    ordering = (sortable[sort_col_idx], sort_dir)
    page = _history.get_actions(dataset_id, table_name, offset=offset,
                                limit=limit, ordering=ordering,
                                search=search_term)
    # Unpaged fetch used only for the total row count.
    all_actions = _history.get_actions(dataset_id, table_name)
    total = len(all_actions)
    return jsonify(draw=int(request.args.get('draw')),
                   recordsTotal=total,
                   recordsFiltered=total,
                   data=page)