def create_flow_user(email, username, password, fullname, is_sso, is_admin):
    # Create a flow user plus its organization membership, per-student folders,
    # and a userinfo metadata file; returns the new User row (or None if the
    # username is already taken).
    existing = User.query.filter(User.user_name == username).first()
    if existing is not None:
        # nothing to do; keep the original message and bail out
        print("User %s exists." % (username))
        return

    role = User.SYSTEM_ADMIN if is_admin else User.STANDARD_USER

    # create the user record itself
    print("Creating user %s" % (username))
    user_id = create_user(email, username, password, fullname, role)

    # attach the user to the flow organization (rooted at /testing)
    print("Creating organization user.")
    membership = OrganizationUser()
    membership.organization_id = find_resource('/testing').id
    membership.user_id = user_id
    membership.is_admin = is_admin
    db.session.add(membership)
    db.session.commit()

    # per-student storage: one folder for programs, one for recorded datasets
    student_folders = [
        'testing/student-folders/%s/programs' % (username),
        'testing/student-folders/%s/datasets' % (username),
    ]
    for student_folder in student_folders:
        print("Creating student folder %s." % (student_folder))
        _create_folders(student_folder)

    # stash a small metadata file recording how the account was provisioned
    userinfo_path = '%s/%s/%s/userinfo' % ('testing', 'student-folders', username)
    userinfo_json = json.dumps({'is_sso': is_sso})
    timestamp = datetime.datetime.now()
    _create_file(userinfo_path, timestamp, timestamp, userinfo_json)

    print('Created flow user: %s' % (email))
    return User.query.filter(User.id == user_id).first()
def _save():
    # Persist the uploaded content under `path` (both taken from the enclosing
    # scope). For the metadata-only types we rewrite just the metadata file;
    # otherwise we write both the program source and its metadata.
    metadata_bytes = request.values.get('metadata').encode('utf-8')
    timestamp = datetime.datetime.now()
    metadata_path = '%s/%s' % (path, 'metadata')

    if type in ('programmeta', 'datasetmeta'):
        # metadata-only save
        _create_file(metadata_path, timestamp, timestamp, metadata_bytes)
        return json.dumps({
            'success': True,
            'message': 'Saved file metadata %s.' % (metadata_path),
        })

    # full save: program contents plus metadata; make sure the folder exists first
    program_bytes = request.values.get('content').encode('utf-8')
    _create_folders(path)
    program_path = '%s/%s' % (path, 'program')
    _create_file(program_path, timestamp, timestamp, program_bytes)
    _create_file(metadata_path, timestamp, timestamp, metadata_bytes)
    return json.dumps({
        'success': True,
        'message': 'Saved file %s.' % (filename),
    })
def post(self):
    """Create a new resource under a parent folder.

    Reads all parameters from the request (form values and optional file
    upload). Checks write permission on the parent (creating intermediate
    folders when the parent path does not yet exist), then creates a
    Resource record whose sub-type-specific attributes depend on `type`:
    FILE (stores contents as a revision, may convert xls->csv and compute a
    thumbnail), SEQUENCE (stores sequence config), REMOTE_FOLDER (stores a
    remote path), CONTROLLER_FOLDER (creates status records and sequences).
    Returns a dict with 'status'/'id' on success; aborts with 400/403 on
    bad input or missing permission.
    """
    args = request.values

    # get parent
    path = args.get('path', args.get('parent'))  # fix(soon): decide whether to use path or parent
    if not path:
        abort(400)
    parent_resource = find_resource(path)  # expects leading slash
    if not parent_resource:
        # parent doesn't exist yet: verify we may write inside the top-level
        # organization, then create the intermediate folders ourselves
        try:
            # fix(soon): need to traverse up tree to check permissions, not just check org permissions
            org_name = path.split('/')[1]
            org_resource = Resource.query.filter(Resource.name == org_name, Resource.parent_id == None, Resource.deleted == False).one()
            if access_level(org_resource.query_permissions()) < ACCESS_LEVEL_WRITE:
                abort(403)
        except NoResultFound:
            abort(403)
        _create_folders(path.strip('/'))
        parent_resource = find_resource(path)
        if not parent_resource:
            abort(400)

    # make sure we have write access to parent
    if access_level(parent_resource.query_permissions()) < ACCESS_LEVEL_WRITE:
        abort(403)

    # get main parameters; an uploaded file's own name wins over args['name']
    file = request.files.get('file', None)
    name = file.filename if file else args['name']
    type = int(args['type'])  # fix(soon): safe int conversion

    # get timestamps; both snake_case and camelCase spellings are accepted,
    # modification defaults to creation, creation defaults to "now" (UTC)
    if 'creation_timestamp' in args:
        creation_timestamp = parse_json_datetime(args['creation_timestamp'])
    elif 'creationTimestamp' in args:
        creation_timestamp = parse_json_datetime(args['creationTimestamp'])
    else:
        creation_timestamp = datetime.datetime.utcnow()
    if 'modification_timestamp' in args:
        modification_timestamp = parse_json_datetime(args['modification_timestamp'])
    elif 'modificationTimestamp' in args:
        modification_timestamp = parse_json_datetime(args['modificationTimestamp'])
    else:
        modification_timestamp = creation_timestamp

    # check for existing resource with the same name under this parent
    try:
        resource = Resource.query.filter(Resource.parent_id == parent_resource.id, Resource.name == name, Resource.deleted == False).one()
        return {'message': 'Resource already exists.', 'status': 'error'}  # fix(soon): return 400 status code
    except NoResultFound:
        pass

    # create resource
    r = Resource()
    r.parent_id = parent_resource.id
    r.organization_id = parent_resource.organization_id
    r.name = name
    r.type = type
    r.creation_timestamp = creation_timestamp
    r.modification_timestamp = modification_timestamp
    if type == Resource.FILE:
        # temporarily mark resource as deleted in case we fail to create resource revision record
        r.deleted = True
    else:
        r.deleted = False
    if 'user_attributes' in args:
        r.user_attributes = args['user_attributes']  # we assume that the attributes are already a JSON string

    # handle sub-types
    if type == Resource.FILE:
        # get file contents (if any) from request; falls back to base64 body args
        if file:
            stream = cStringIO.StringIO()
            file.save(stream)
            data = stream.getvalue()
        else:
            data = base64.b64decode(args.get('contents', args.get('data', '')))  # fix(clean): remove contents version

        # convert files to standard types/formgat
        # fix(soon): should give the user a warning or ask for confirmation
        if name.endswith('xls') or name.endswith('xlsx'):
            data = convert_xls_to_csv(data)
            name = name.rsplit('.')[0] + '.csv'
            r.name = name
        if name.endswith('csv') or name.endswith('txt'):
            data = convert_new_lines(data)

        # compute other file attributes
        system_attributes = {
            'hash': hashlib.sha1(data).hexdigest(),
            'size': len(data),
        }
        if 'file_type' in args:  # fix(soon): can we remove this? current just using for markdown files
            system_attributes['file_type'] = args['file_type']
        r.system_attributes = json.dumps(system_attributes)
    elif type == Resource.SEQUENCE:
        # sequence config: history limit, data type, precision, storage throttle
        data_type = int(args['data_type'])  # fix(soon): safe convert to int
        system_attributes = {
            'max_history': 10000,
            'data_type': data_type,
        }
        if args.get('decimal_places', '') != '':
            system_attributes['decimal_places'] = int(args['decimal_places'])  # fix(soon): safe convert to int
        if args.get('min_storage_interval', '') != '':
            min_storage_interval = int(args['min_storage_interval'])  # fix(soon): safe convert to int
        else:
            if data_type == Resource.TEXT_SEQUENCE:
                min_storage_interval = 0  # default to 0 seconds for text sequences (want to record all log entries)
            else:
                min_storage_interval = 50  # default to 50 seconds for numeric and image sequences
        if args.get('units'):
            system_attributes['units'] = args['units']
        system_attributes['min_storage_interval'] = min_storage_interval
        r.system_attributes = json.dumps(system_attributes)
    elif type == Resource.REMOTE_FOLDER:
        r.system_attributes = json.dumps({
            'remote_path': args['remote_path'],
        })

    # save resource record
    db.session.add(r)
    db.session.commit()

    # save file contents (after we have resource ID) and compute thumbnail if needed
    if type == Resource.FILE:
        add_resource_revision(r, r.creation_timestamp, data)
        r.deleted = False  # now that have sucessfully created revision, we can make the resource live
        db.session.commit()

        # compute thumbnail
        # fix(soon): recompute thumbnail on resource update
        if name.endswith('.png') or name.endswith('.jpg'):  # fix(later): handle more types, capitalizations
            for width in [120]:  # fix(later): what will be our standard sizes?
                (thumbnail_contents, thumbnail_width, thumbnail_height) = compute_thumbnail(data, width)  # fix(later): if this returns something other than requested width, we'll keep missing the cache
                thumbnail = Thumbnail()
                thumbnail.resource_id = r.id
                thumbnail.width = thumbnail_width
                thumbnail.height = thumbnail_height
                thumbnail.format = 'jpg'
                thumbnail.data = thumbnail_contents
                db.session.add(thumbnail)
                # NOTE(review): no commit follows the thumbnail add in this
                # method — verify a later commit/teardown flushes the session

    # handle the case of creating a controller; requires creating some additional records
    elif type == Resource.CONTROLLER_FOLDER:

        # create controller status record
        controller_status = ControllerStatus()
        controller_status.id = r.id
        controller_status.client_version = ''
        controller_status.web_socket_connected = False
        controller_status.watchdog_notification_sent = False
        controller_status.attributes = '{}'
        db.session.add(controller_status)
        db.session.commit()

        # create log sequence
        create_sequence(r, 'log', Resource.TEXT_SEQUENCE, max_history = 10000)

        # create a folder for status sequences
        status_folder = Resource()
        status_folder.parent_id = r.id
        status_folder.organization_id = r.organization_id
        status_folder.name = 'status'
        status_folder.type = Resource.BASIC_FOLDER
        status_folder.creation_timestamp = datetime.datetime.utcnow()
        status_folder.modification_timestamp = status_folder.creation_timestamp
        db.session.add(status_folder)
        db.session.commit()

        # create status sequences
        create_sequence(status_folder, 'free_disk_space', Resource.NUMERIC_SEQUENCE, max_history = 10000, units = 'bytes')
        create_sequence(status_folder, 'processor_usage', Resource.NUMERIC_SEQUENCE, max_history = 10000, units = 'percent')
        create_sequence(status_folder, 'messages_sent', Resource.NUMERIC_SEQUENCE, max_history = 10000)
        create_sequence(status_folder, 'messages_received', Resource.NUMERIC_SEQUENCE, max_history = 10000)
        create_sequence(status_folder, 'serial_errors', Resource.NUMERIC_SEQUENCE, max_history = 10000)
    return {'status': 'ok', 'id': r.id}