def update_project():
    # Update an existing project: replace its GridFS blob and refresh its metadata.
    try:
        project_dict = json.loads(request.data)
        world_name = project_dict["project_name"]
        save_dict = db[current_user.project_collection_name].find_one(
            {"project_name": world_name})
        mdata = save_dict["metadata"]
        mdata["updated"] = datetime.datetime.utcnow()
        pdict = make_jsonizable_and_compress(project_dict)
        new_file_id = fs.put(pdict)
        fs.delete(save_dict["file_id"])  # remove the old GridFS blob
        save_dict["metadata"] = mdata
        save_dict["file_id"] = new_file_id
        db[current_user.project_collection_name].update_one(
            {"project_name": world_name},
            {'$set': save_dict})
        return_data = {
            "project_name": world_name,
            "success": True,
            "message": "Project Successfully Saved"
        }
    except Exception as ex:
        print("got an error in update_project")
        return_data = {"success": False, "message": "Failed save"}
    return jsonify(return_data)
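# Note: make_jsonizable_and_compress and read_project_dict are imported helpers that are
# not defined in this file. As a rough illustration only (the real encoding may differ),
# the compression side could look something like the sketch below, with read_project_dict
# performing the inverse GridFS read and decompression.
import json
import zlib


def make_jsonizable_and_compress_sketch(obj):
    # Hypothetical stand-in for make_jsonizable_and_compress: JSON-encode the object
    # (falling back to str() for values such as datetimes) and zlib-compress the
    # UTF-8 bytes before handing them to fs.put().
    return zlib.compress(json.dumps(obj, default=str).encode("utf-8"))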
def update_user_collections(self, user_obj):
    # Migrate each of the user's data collections so that document bodies are stored
    # in GridFS (referenced by "file_id") instead of inline in "data_rows".
    couldnt_process = []
    for colname in user_obj.data_collections:
        # print "processing " + colname
        collection_to_copy = user_obj.full_collection_name(colname)
        regex = re.compile("__metadata__")
        doc = db[collection_to_copy].find_one({"name": {"$not": regex}})
        if doc is None:
            print(colname + " only has metadata")
            couldnt_process.append(colname)
            continue
        if "file_id" in doc:
            # print "file_id exists"
            continue  # collection already migrated
        new_collection_name = user_obj.full_collection_name(colname + "XXXX")
        for doc in db[collection_to_copy].find():
            if not bytes_to_string(doc["name"]) == "__metadata__":
                if "file_id" in doc:
                    doc_text = fs.get(doc["file_id"]).read()
                else:
                    doc_text = make_python_object_jsonizable(doc["data_rows"])
                    del doc["data_rows"]
                doc["file_id"] = fs.put(doc_text)
            db[new_collection_name].insert_one(doc)
        db.drop_collection(collection_to_copy)
        db[new_collection_name].rename(collection_to_copy)
        # print "processed " + colname
    print("couldn't process " + str(couldnt_process))
    return
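# bytes_to_string is another imported helper that is not defined here. Given the
# commented-out Python 2 print statements above, it presumably normalizes document
# names that may still be stored as bytes. A minimal sketch under that assumption:
def bytes_to_string_sketch(val):
    # Hypothetical stand-in for bytes_to_string: decode bytes, pass str through unchanged.
    if isinstance(val, bytes):
        return val.decode("utf-8")
    return val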
def copy_between_accounts(source_user, dest_user, res_type, new_res_name, res_name):
    # Copy one resource of type res_type from source_user's account into dest_user's
    # account under the name new_res_name, duplicating its GridFS blob.
    try:
        name_field = name_keys[res_type]
        collection_name = source_user.resource_collection_name(res_type)
        old_dict = db[collection_name].find_one({name_field: res_name})
        new_res_dict = {name_field: new_res_name}
        for (key, val) in old_dict.items():
            if (key == "_id") or (key == name_field):
                continue
            new_res_dict[key] = val
        if "metadata" not in new_res_dict:
            mdata = {
                "datetime": datetime.datetime.utcnow(),
                "updated": datetime.datetime.utcnow(),
                "tags": "",
                "notes": ""
            }
            new_res_dict["metadata"] = mdata
        else:
            new_res_dict["metadata"]["datetime"] = datetime.datetime.utcnow()
        project_dict = read_project_dict(fs, old_dict["file_id"])
        project_dict["user_id"] = dest_user.get_id()
        pdict = make_jsonizable_and_compress(project_dict)
        new_res_dict["file_id"] = fs.put(pdict)
        new_collection_name = dest_user.resource_collection_name(res_type)
        db[new_collection_name].insert_one(new_res_dict)
        metadata = new_res_dict["metadata"]
        overall_res = [
            metadata,
            jsonify({
                "success": True,
                "message": "Resource Successfully Copied",
                "alert_type": "alert-success"
            })
        ]
        return overall_res
    except Exception as ex:
        overall_res = [
            None,
            generic_exception_handler.get_exception_for_ajax(ex, "Error copying resource")
        ]
        return overall_res
def save_new_project():
    # Create a new project document with fresh metadata and a compressed GridFS blob.
    try:
        project_dict = json.loads(request.data)
        project_name = project_dict["project_name"]
        mdata = create_initial_metadata()
        save_dict = {"metadata": mdata, "project_name": project_name}
        pdict = make_jsonizable_and_compress(project_dict)
        save_dict["file_id"] = fs.put(pdict)
        db[current_user.project_collection_name].insert_one(save_dict)
        return_data = {
            "project_name": project_dict["project_name"],
            "success": True,
            "message": "Project Successfully Saved"
        }
    except Exception as ex:
        print("got an error in save_new_project")
        return_data = {"success": False, "message": "Failed save"}
    return jsonify(return_data)
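# create_initial_metadata is not defined in this file. Judging from the metadata dict
# built inline in copy_between_accounts above, a plausible sketch is the following; it
# relies on the module's existing datetime import.
def create_initial_metadata_sketch():
    # Hypothetical stand-in for create_initial_metadata: stamp creation and update
    # times and start with empty tags and notes.
    return {
        "datetime": datetime.datetime.utcnow(),
        "updated": datetime.datetime.utcnow(),
        "tags": "",
        "notes": ""
    }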
def duplicate_project(self):
    # Duplicate an existing project under a new name for the current user.
    user_obj = current_user
    project_to_copy = request.json['res_to_copy']
    new_project_name = request.json['new_res_name']
    save_dict = db[user_obj.project_collection_name].find_one(
        {"project_name": project_to_copy})
    mdata = save_dict["metadata"]
    new_save_dict = {"metadata": mdata, "project_name": new_project_name}
    # Uncompressing and recompressing below is necessary because we need to change the
    # project_name inside the project dict. So, essentially, the project_name is stored
    # in two places, which is non-optimal.
    # tactic_todo fix project_name being stored in two places in project saves
    project_dict = read_project_dict(fs, mdata, save_dict["file_id"])
    project_dict["project_name"] = new_project_name
    pdict = make_jsonizable_and_compress(project_dict)
    new_save_dict["file_id"] = fs.put(pdict)
    db[user_obj.project_collection_name].insert_one(new_save_dict)
    new_row = self.build_res_dict(new_project_name, mdata, user_obj)
    return jsonify({"success": True, "new_row": new_row})