def test_gages_dam_attr(self):
    """Load a saved GagesModel data model, attach NID dam data, and persist
    the gage -> main-dam-purpose mapping as JSON.

    Reads from the "quickdata" cache under the DB data path and writes the
    NID intermediate files into a sibling "nid/test" directory.
    """
    # Cached, pre-processed data model for the full CONUS gage set.
    quick_data_dir = os.path.join(self.config_data.data_path["DB"], "quickdata")
    data_dir = os.path.join(quick_data_dir, "conus-all_90-10_nan-0.0_00-1.0")
    df = GagesModel.load_datamodel(data_dir,
                                   data_source_file_name='data_source.txt',
                                   stat_file_name='Statistics.json',
                                   flow_file_name='flow.npy',
                                   forcing_file_name='forcing.npy',
                                   attr_file_name='attr.npy',
                                   f_dict_file_name='dictFactorize.json',
                                   var_dict_file_name='dictAttribute.json',
                                   t_s_dict_file_name='dictTimeSpace.json')
    # nid_input = NidModel()
    nid_input = NidModel(self.config_data.config_file)
    # nid_dir = os.path.join("/".join(self.config_data.data_path["DB"].split("/")[:-1]), "nid", "quickdata")
    # NID output goes next to (not inside) the DB directory: <parent-of-DB>/nid/test.
    nid_dir = os.path.join(
        "/".join(self.config_data.data_path["DB"].split("/")[:-1]), "nid", "test")
    save_nidinput(nid_input, nid_dir,
                  nid_source_file_name='nid_source.txt',
                  nid_data_file_name='nid_data.shp')
    # Join gage data with dam data, then persist the per-gage main dam purpose dict.
    data_input = GagesDamDataModel(df, nid_input)
    serialize_json(data_input.gage_main_dam_purpose,
                   os.path.join(nid_dir, "dam_main_purpose_dict.json"))
def save_datamodel(data_model, num_str=None, **kwargs):
    """Serialize every part of *data_model* into its temp directory.

    The data source object is pickled; the stat/factorize/attribute/
    time-space dicts are written as JSON; the flow/forcing/attr arrays are
    written as numpy files. Output file names come from the ``*_file_name``
    keyword arguments; *num_str*, when given, selects a numbered
    subdirectory of the configured "Temp" path (created on demand).
    """
    temp_root = data_model.data_source.data_config.data_path["Temp"]
    dir_temp = os.path.join(temp_root, num_str) if num_str else temp_root
    if not os.path.isdir(dir_temp):
        os.makedirs(dir_temp)

    def _target(key):
        # Resolve an output path from the caller-supplied file name.
        return os.path.join(dir_temp, kwargs[key])

    # Resolve all paths up front so a missing kwarg fails before any write.
    data_source_file = _target('data_source_file_name')
    stat_file = _target('stat_file_name')
    flow_file = _target('flow_file_name')
    forcing_file = _target('forcing_file_name')
    attr_file = _target('attr_file_name')
    f_dict_file = _target('f_dict_file_name')
    var_dict_file = _target('var_dict_file_name')
    t_s_dict_file = _target('t_s_dict_file_name')

    serialize_pickle(data_model.data_source, data_source_file)
    serialize_json(data_model.stat_dict, stat_file)
    serialize_numpy(data_model.data_flow, flow_file)
    serialize_numpy(data_model.data_forcing, forcing_file)
    serialize_numpy(data_model.data_attr, attr_file)
    # dictFactorize.json is the explanation of value of categorical variables
    serialize_json(data_model.f_dict, f_dict_file)
    serialize_json(data_model.var_dict, var_dict_file)
    serialize_json(data_model.t_s_dict, t_s_dict_file)
def test_dam_coords_storages(self):
    """Compute dam coordinates and storages for the test data model and
    persist them as JSON dicts in the NID directory.

    Loads the cached train and test GagesModel data models (the test model
    is normalized with the train model's statistics), reloads the NID dam
    model and the previously saved gage -> main-purpose mapping, then
    writes ``dam_points_dict.json`` and ``dam_storages_dict.json``.
    """
    quick_data_dir = os.path.join(self.config_data.data_path["DB"], "quickdata")
    data_dir = os.path.join(quick_data_dir, "conus-all_90-10_nan-0.0_00-1.0")
    # Training data model: its stat_dict is reused to normalize the test model.
    data_model_train = GagesModel.load_datamodel(
        data_dir,
        data_source_file_name='data_source.txt',
        stat_file_name='Statistics.json',
        flow_file_name='flow.npy',
        forcing_file_name='forcing.npy',
        attr_file_name='attr.npy',
        f_dict_file_name='dictFactorize.json',
        var_dict_file_name='dictAttribute.json',
        t_s_dict_file_name='dictTimeSpace.json')
    gages_model_train = GagesModel.update_data_model(
        self.config_data, data_model_train)
    data_model_test = GagesModel.load_datamodel(
        data_dir,
        data_source_file_name='test_data_source.txt',
        stat_file_name='test_Statistics.json',
        flow_file_name='test_flow.npy',
        forcing_file_name='test_forcing.npy',
        attr_file_name='test_attr.npy',
        f_dict_file_name='test_dictFactorize.json',
        var_dict_file_name='test_dictAttribute.json',
        t_s_dict_file_name='test_dictTimeSpace.json')
    gages_model_test = GagesModel.update_data_model(
        self.config_data, data_model_test,
        train_stat_dict=gages_model_train.stat_dict)
    # NID files live in <parent-of-DB>/nid/test (written by an earlier step).
    nid_dir = os.path.join(
        "/".join(self.config_data.data_path["DB"].split("/")[:-1]), "nid", "test")
    nid_input = NidModel.load_nidmodel(
        nid_dir,
        nid_source_file_name='nid_source.txt',
        nid_data_file_name='nid_data.shp')
    gage_main_dam_purpose = unserialize_json(
        os.path.join(nid_dir, "dam_main_purpose_dict.json"))
    data_input = GagesDamDataModel(gages_model_test, nid_input,
                                   gage_main_dam_purpose)
    dam_coords, dam_storages = data_input.coords_of_dams()
    serialize_json(dam_coords, os.path.join(nid_dir, "dam_points_dict.json"))
    serialize_json(dam_storages, os.path.join(nid_dir, "dam_storages_dict.json"))
def index(user, id):
    """Return one thread (with its messages) for *user* as JSON.

    Looks the thread up by *id* in the user's threads index, marks it read,
    and returns its subject/date/id plus the full message bodies fetched
    from the user's Maildir. Responds 400 if the index DB cannot be opened
    and 404 if the thread id is unknown.
    """
    thread = None
    try:
        threads_index = DatetimeCabinet("/home/kite/threads.db")
    except IOError:
        response.status = 400
        return

    # FIXME: use an index for threads entries ?
    for thr in threads_index[user]["threads_index"]:
        if thr["id"] == id:
            thread = thr

    # `is None` rather than `== None`: identity check per PEP 8 (was `== None`).
    if thread is None:
        abort(404, "Thread not found.")

    # Mark the thread as read and persist the flag.
    thread["unread"] = False
    threads_index.sync()

    response.content_type = "application/json"
    ret_json = {
        "messages": [],
        "subject": thread["subject"],
        "date": thread["date"],
        "id": thread["id"]
    }

    mdir = read_mail("/home/kite/Maildirs/%s" % user)
    for mail_id in thread["messages"]:
        ret_json["messages"].append(get_email(mdir, mail_id))

    return serialize_json(ret_json, protection=False)
def index(user, id):
    """Return one thread (with its messages) for *user* as JSON.

    Looks the thread up by *id* in the user's threads index, marks it read,
    and returns its subject/date/id plus the full message bodies fetched
    from the user's Maildir. Responds 400 if the index DB cannot be opened
    and 404 if the thread id is unknown.
    """
    thread = None
    try:
        threads_index = DatetimeCabinet("/home/kite/threads.db")
    except IOError:
        response.status = 400
        return

    # FIXME: use an index for threads entries ?
    for thr in threads_index[user]["threads_index"]:
        if thr["id"] == id:
            thread = thr

    # `is None` rather than `== None`: identity check per PEP 8 (was `== None`).
    if thread is None:
        abort(404, "Thread not found.")

    # Mark the thread as read and persist the flag.
    thread["unread"] = False
    threads_index.sync()

    response.content_type = "application/json"
    ret_json = {"messages": [],
                "subject": thread["subject"],
                "date": thread["date"],
                "id": thread["id"]
                }

    mdir = read_mail("/home/kite/Maildirs/%s" % user)
    for mail_id in thread["messages"]:
        ret_json["messages"].append(get_email(mdir, mail_id))

    return serialize_json(ret_json, protection=False)
def test_data_model_test(self):
    """Build a DataModel from the pickled test data source and serialize
    each of its parts to separate files."""
    source_data = unserialize_pickle(self.data_source_test_file)
    data_model = DataModel(source_data)
    # Persist data_model piecewise: serializing the whole object at once is
    # slow, so each part is saved separately — dicts directly as JSON files,
    # array data via numpy/HDF5.
    serialize_json(data_model.stat_dict, self.stat_file)
    serialize_numpy(data_model.data_flow, self.flow_file)
    serialize_numpy(data_model.data_forcing, self.forcing_file)
    serialize_numpy(data_model.data_attr, self.attr_file)
    # dictFactorize.json is the explanation of value of categorical variables
    serialize_json(data_model.f_dict, self.f_dict_file)
    serialize_json(data_model.var_dict, self.var_dict_file)
    serialize_json(data_model.t_s_dict, self.t_s_dict_file)
def index(user):
    """Return the list of threads for *user* as JSON.

    Responds 500 if the threads DB cannot be opened; an unknown user
    yields an empty list rather than an error.
    """
    try:
        threads_index = DatetimeCabinet("/home/kite/threads.db")
    except IOError:
        abort(500, "Invalid thread")
        return

    try:
        threads = threads_index[user]["threads_index"]
    except KeyError:
        # Unknown user: no threads yet.
        threads = []

    # Materialize as a plain list (was a manual append loop — PERF402).
    ret_threads = list(threads)

    response.content_type = "application/json"
    return serialize_json(ret_threads, protection=False)
def index(user):
    """Return the list of threads for *user* as JSON.

    Responds 400 if the threads DB cannot be opened; an unknown user
    yields an empty list rather than an error.
    """
    # FIXME: input sanitization - check permissions for user
    try:
        threads_index = DatetimeCabinet("/home/kite/threads.db")
    except IOError:
        response.status = 400
        return

    try:
        threads = threads_index[user]["threads_index"]
    except KeyError:
        # Unknown user: no threads yet.
        threads = []

    # Materialize as a plain list (was a manual append loop — PERF402).
    ret_threads = list(threads)

    response.content_type = "application/json"
    return serialize_json(ret_threads, protection=False)
def ArticleCommentHandler(request, article_id=None):
    """GET: list the comments of an article; POST: create a comment on it.

    Returns 400 when no article id is supplied, 401 when POSTing without an
    authenticated user, and 409 when the POSTed JSON cannot be serialized
    into an api.Comment.
    """
    if article_id is None:
        return JsonResponse({"error": "No article was defined."}, status=400)

    if request.method == "GET":
        try:
            comments = Comment.objects.filter(article=article_id)
        except Comment.DoesNotExist:
            # NOTE(review): filter() never raises DoesNotExist (it returns an
            # empty queryset) — this branch looks unreachable; kept as-is.
            return JsonResponse([], status=200)

        data = []
        for comment in comments:
            json_comment = to_dict(comment)
            data.append(json_comment)

        return JsonResponse(data, safe=False)

    if request.method == "POST":
        # Create comment
        user = request.user
        if user is None:
            return JsonResponse({"error": "User not authenticated"}, status=401)

        body = parseJSON(request.body)
        try:
            serialized_data = serialize_json(body, 'api.Comment')
        except (ValueError, KeyError):
            return JsonResponse({'error': 'JSON is invalid'}, status=409)

        # Use the builtin next() instead of the Python-2-only .next() method,
        # so the deserializer iterator works on both Python 2.6+ and 3.
        new_comment = next(serializers.deserialize('json', serialized_data)).object
        new_comment.owner = request.user
        new_comment.article = Article.objects.get(pk=article_id)
        new_comment.save()

        return JsonResponse(to_dict(new_comment))
data_source_file_name='data_source.txt', stat_file_name='Statistics.json', flow_file_name='flow.npy', forcing_file_name='forcing.npy', attr_file_name='attr.npy', f_dict_file_name='dictFactorize.json', var_dict_file_name='dictAttribute.json', t_s_dict_file_name='dictTimeSpace.json') nid_input = NidModel(cfg) nid_dir = os.path.join(cfg.NID.NID_DIR, "test") save_nidinput(nid_input, nid_dir, nid_source_file_name='nid_source.txt', nid_data_file_name='nid_data.shp') data_input = GagesDamDataModel(df, nid_input) serialize_json(data_input.gage_main_dam_purpose, os.path.join(nid_dir, "dam_main_purpose_dict.json")) gage_main_dam_purpose = unserialize_json(nid_gene_file) gage_main_dam_purpose_lst = list(gage_main_dam_purpose.values()) gage_main_dam_purpose_lst_merge = "".join(gage_main_dam_purpose_lst) gage_main_dam_purpose_unique = np.unique( list(gage_main_dam_purpose_lst_merge)) # gage_main_dam_purpose_unique = np.unique(gage_main_dam_purpose_lst) purpose_regions = {} for i in range(gage_main_dam_purpose_unique.size): sites_id = [] for key, value in gage_main_dam_purpose.items(): if gage_main_dam_purpose_unique[i] in value: sites_id.append(key) assert (all(x < y for x, y in zip(sites_id, sites_id[1:]))) purpose_regions[gage_main_dam_purpose_unique[i]] = sites_id id_regions_idx = []
def ArticleHandler(request, article_id=None):
    """CRUD dispatch for articles.

    GET without id lists all articles; GET with id fetches one; PUT updates
    topic/article_text (owner only); POST creates a new article for the
    authenticated user; DELETE removes an article (owner only). Returns 404
    for unknown ids, 401 on permission/authentication failures, and 409 for
    invalid POSTed JSON.
    """
    if request.method == "GET" and article_id is None:
        articles = Article.objects.all()
        data = []
        for article in articles:
            article_dict = to_dict(article)
            data.append(article_dict)
        return JsonResponse(data, safe=False)

    if request.method == "GET" and article_id is not None:
        try:
            article = Article.objects.get(pk=article_id)
        except Article.DoesNotExist:
            return JsonResponse({"error": "Article does not exist"}, status=404)
        article_dict = to_dict(article)
        return JsonResponse(article_dict, status=200)

    if request.method == "PUT" and article_id is not None:
        try:
            article = Article.objects.get(pk=article_id)
        except Article.DoesNotExist:
            return JsonResponse({"error": "Article does not exist"}, status=404)

        if article.owner != request.user:
            return JsonResponse({"error": "Permission denied"}, status=401)

        data = parseJSON(request.body)
        # Partial update: each field is optional in the request body.
        try:
            article.topic = data["topic"]
        except KeyError:
            pass
        try:
            article.article_text = data["article_text"]
        except KeyError:
            pass

        article.save()
        return JsonResponse(to_dict(article), status=200)

    if request.method == "POST":
        # Create article
        user = request.user
        if user is None:
            return JsonResponse({"error": "User not authenticated"}, status=401)

        body = parseJSON(request.body)
        try:
            serialized_data = serialize_json(body, 'api.Article')
        except (ValueError, KeyError):
            return JsonResponse({'error': 'JSON is invalid'}, status=409)

        # Use the builtin next() instead of the Python-2-only .next() method,
        # so the deserializer iterator works on both Python 2.6+ and 3.
        new_article = next(serializers.deserialize('json', serialized_data)).object
        new_article.owner = request.user
        new_article.save()

        return JsonResponse(to_dict(new_article))

    if request.method == "DELETE" and article_id is not None:
        try:
            article = Article.objects.get(pk=article_id)
        except Article.DoesNotExist:
            return JsonResponse({"error": "Article does not exist"}, status=404)

        if article.owner == request.user:
            article.delete()
            return HttpResponse(status=204)
        else:
            return JsonResponse({"error": "Permission denied"}, status=401)