def test_array_numpy_labelled(self):
    labelled_input = {"a": []}
    output = ujson.loads(ujson.dumps(labelled_input),
                         numpy=True, labelled=True)
    assert (np.empty((1, 0)) == output[0]).all()
    assert (np.array(["a"]) == output[1]).all()
    assert output[2] is None

    labelled_input = [{"a": 42}]
    output = ujson.loads(ujson.dumps(labelled_input),
                         numpy=True, labelled=True)
    assert (np.array([u("a")]) == output[2]).all()
    assert (np.array([42]) == output[0]).all()
    assert output[1] is None

    # see gh-10837: write out the dump explicitly
    # so there is no dependency on iteration order
    input_dumps = ('[{"a": 42, "b":31}, {"a": 24, "c": 99}, '
                   '{"a": 2.4, "b": 78}]')
    output = ujson.loads(input_dumps, numpy=True, labelled=True)
    expected_vals = np.array(
        [42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3, 2))
    assert (expected_vals == output[0]).all()
    assert output[1] is None
    assert (np.array([u("a"), "b"]) == output[2]).all()

    input_dumps = ('{"1": {"a": 42, "b":31}, "2": {"a": 24, "c": 99}, '
                   '"3": {"a": 2.4, "b": 78}}')
    output = ujson.loads(input_dumps, numpy=True, labelled=True)
    expected_vals = np.array(
        [42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3, 2))
    assert (expected_vals == output[0]).all()
    assert (np.array(["1", "2", "3"]) == output[1]).all()
    assert (np.array(["a", "b"]) == output[2]).all()
def test_encode_non_c_locale(self):
    lc_category = locale.LC_NUMERIC

    # We just need one of these locales to work.
    for new_locale in ("it_IT.UTF-8", "Italian_Italy"):
        if tm.can_set_locale(new_locale, lc_category):
            with tm.set_locale(new_locale, lc_category):
                assert ujson.loads(ujson.dumps(4.78e60)) == 4.78e60
                assert ujson.loads("4.78", precise_float=True) == 4.78
            break
def test_encode_null_character(self):
    wrapped_input = "31337 \x00 1337"
    output = ujson.encode(wrapped_input)

    assert wrapped_input == json.loads(output)
    assert output == json.dumps(wrapped_input)
    assert wrapped_input == ujson.decode(output)

    alone_input = "\x00"
    output = ujson.encode(alone_input)

    assert alone_input == json.loads(output)
    assert output == json.dumps(alone_input)
    assert alone_input == ujson.decode(output)
    assert '" \\u0000\\r\\n "' == ujson.dumps(" \u0000\r\n ")
def verify(func):
    # Decorator: re-validate the caller's token before invoking the wrapped view.
    def wrapper(request, *args, **kwargs):
        if request.method == 'POST':
            token = request.POST.get('u_token')
            user = User.objects.filter(u_token=token)
            if user:
                userinfo = User.objects.get(u_token=token)
                userinfo_dict = model_to_dict(userinfo)
                new_token = GetUserToken(userinfo_dict)
                if token == new_token:
                    return func(request, *args, **kwargs)
                else:
                    User.objects.filter(u_token=userinfo.u_token).update(
                        u_token=new_token)
                    # return HttpResponseRedirect('/')
                    result = {"result": 'success', "message": 'Login expired'}
                    return HttpResponse(json.dumps(result),
                                        content_type="application/json")
            else:
                result = {"result": 'success',
                          "message": 'Kicked off by another login'}
                return HttpResponse(json.dumps(result),
                                    content_type="application/json")
        else:
            return HttpResponseRedirect('/')
    return wrapper
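Since the body dereferences `func`, this function only makes sense as a decorator; applied to a view it would look like the sketch below (the `profile` view and its response body are hypothetical):

@verify
def profile(request):
    # Only reached when the posted u_token matches the freshly computed one.
    return HttpResponse(json.dumps({"result": 'success'}),
                        content_type="application/json")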
def save_adata_pq(adata, schema, output_directory):
    import pandas._libs.json as ujson

    logger.info('Save adata')
    X_dir = os.path.join(output_directory, 'X')
    obs_dir = os.path.join(output_directory, 'obs')
    obsm_dir = os.path.join(output_directory, 'obsm')
    os.makedirs(X_dir, exist_ok=True)
    os.makedirs(obs_dir, exist_ok=True)
    os.makedirs(obsm_dir, exist_ok=True)
    with gzip.open(os.path.join(output_directory, 'index.json.gz'), 'wt') as f:
        # json.dump(result, f)
        f.write(ujson.dumps(schema, double_precision=2, orient='values'))
    save_adata_X(adata, X_dir)
    save_data_obs(adata, obs_dir)
    save_data_obsm(adata, obsm_dir)
def save_dataset_jsonl(dataset, schema, output_dir, base_name, filesystem):
    compress = False
    index = {}  # key to byte start-end
    filesystem.makedirs(output_dir, exist_ok=True)
    jsonl_path = os.path.join(output_dir, base_name)
    with filesystem.open(jsonl_path, 'wb') as f:
        save_adata_X(dataset, f, index, compress)
        save_data_obs(dataset, f, index, compress)
        save_data_obsm(dataset, f, index, compress)
        write_jsonl(schema, f, 'schema', index)
    with filesystem.open(os.path.join(output_dir, base_name + '.idx.json'),
                         'wt') as f:
        # save index
        # json.dump(result, f)
        result = dict(index=index, file=os.path.basename(jsonl_path))
        f.write(ujson.dumps(result, double_precision=2, orient='values'))
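`write_jsonl` is called above but not defined in this excerpt; a minimal sketch of what such a helper could look like, assuming `index` maps each key to a `[byte_start, byte_length]` pair for later random access (the signature and index layout are assumptions, not the project's confirmed API):

import pandas._libs.json as ujson

def write_jsonl(d, f, name, index, compress=False):
    # Serialize one record, optionally gzip it, and record where it landed.
    data = ujson.dumps(dict(name=name, value=d),
                       double_precision=2).encode('utf-8')
    if compress:
        import gzip
        data = gzip.compress(data)
    start = f.tell()                  # byte offset where this record begins
    f.write(data + b'\n')
    index[name] = [start, len(data)]  # [start, length] of the payload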
def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0,
                freeze_panes=None):
    # Write the frame cells using xlwt.

    sheet_name = self._get_sheet_name(sheet_name)

    if sheet_name in self.sheets:
        wks = self.sheets[sheet_name]
    else:
        wks = self.book.add_sheet(sheet_name)
        self.sheets[sheet_name] = wks

    if _validate_freeze_panes(freeze_panes):
        wks.set_panes_frozen(True)
        wks.set_horz_split_pos(freeze_panes[0])
        wks.set_vert_split_pos(freeze_panes[1])

    style_dict = {}

    for cell in cells:
        val, fmt = self._value_with_fmt(cell.val)

        stylekey = json.dumps(cell.style)
        if fmt:
            stylekey += fmt

        if stylekey in style_dict:
            style = style_dict[stylekey]
        else:
            style = self._convert_to_style(cell.style, fmt)
            style_dict[stylekey] = style

        if cell.mergestart is not None and cell.mergeend is not None:
            wks.write_merge(
                startrow + cell.row,
                startrow + cell.mergestart,
                startcol + cell.col,
                startcol + cell.mergeend,
                val,
                style,
            )
        else:
            wks.write(startrow + cell.row, startcol + cell.col, val, style)
def write_cells(
    self,
    cells,
    sheet_name: str | None = None,
    startrow: int = 0,
    startcol: int = 0,
    freeze_panes: tuple[int, int] | None = None,
) -> None:
    # Write the frame cells using xlsxwriter.
    sheet_name = self._get_sheet_name(sheet_name)

    if sheet_name in self.sheets:
        wks = self.sheets[sheet_name]
    else:
        wks = self.book.add_worksheet(sheet_name)
        self.sheets[sheet_name] = wks

    style_dict = {"null": None}

    if validate_freeze_panes(freeze_panes):
        wks.freeze_panes(*(freeze_panes))

    for cell in cells:
        val, fmt = self._value_with_fmt(cell.val)

        stylekey = json.dumps(cell.style)
        if fmt:
            stylekey += fmt

        if stylekey in style_dict:
            style = style_dict[stylekey]
        else:
            style = self.book.add_format(_XlsxStyler.convert(cell.style, fmt))
            style_dict[stylekey] = style

        if cell.mergestart is not None and cell.mergeend is not None:
            wks.merge_range(
                startrow + cell.row,
                startcol + cell.col,
                startrow + cell.mergestart,
                startcol + cell.mergeend,
                val,
                style,
            )
        else:
            wks.write(startrow + cell.row, startcol + cell.col, val, style)
def getUserListInfo(request):
    if request.method == 'POST':
        u_token = request.POST.get('u_token')
        user = User.objects.filter(u_token=u_token, identity=2)
        if user:
            usersList = list(
                User.objects.filter(identity=1, status=1).values('name',
                                                                 'u_token'))
            result = {
                "result": 'success',
                "message": 'Query succeeded',
                "usersList": usersList
            }
        else:
            result = {"result": 'false', "message": 'No permission'}
        return HttpResponse(json.dumps(result),
                            content_type="application/json")
def save_dataset_pq(dataset, schema, output_directory, filesystem, whitelist):
    X_dir = os.path.join(output_directory, "X")
    obs_dir = os.path.join(output_directory, "obs")
    obsm_dir = os.path.join(output_directory, "obsm")
    filesystem.makedirs(X_dir, exist_ok=True)
    filesystem.makedirs(obs_dir, exist_ok=True)
    filesystem.makedirs(obsm_dir, exist_ok=True)
    with filesystem.open(os.path.join(output_directory, "index.json.gz"),
                         "wt", compression="gzip") as f:
        f.write(ujson.dumps(schema, double_precision=2, orient="values"))
    if whitelist is None or "X" in whitelist:
        save_adata_X(dataset, X_dir, filesystem)
    if whitelist is None or "obs" in whitelist:
        save_data_obs(dataset, obs_dir, filesystem)
    if whitelist is None or "obsm" in whitelist:
        save_data_obsm(dataset, obsm_dir, filesystem)
def sentiment():
    api_key = request.headers.get('api_key')
    # if not request.get_json():
    #     abort(400)
    if api_key is None or api_key != '123456789':
        return '401'
    # Parse the JSON request body and pull out the "body" field.
    payload = json.loads(request.data.decode("utf-8"))
    text = cleanText(payload.get('body', ''))
    try:
        return sentiments(text)
    except Exception:
        return "Not proper sentence"
def prediction_model():
    import numpy as np

    # Store the request from JS
    data = request.args.get('post', 0, type=str)
    data = data.split()
    main_temp = float(data[0])
    main_pressure = int(data[1])
    main_humidity = int(data[2])
    wind_speed = float(data[3])
    date = data[4]
    d = datetime.datetime.strptime(date, "%Y-%m-%d")
    date = d.strftime("%A")
    minute = data[5]
    station = int(data[6])
    d = datetime.datetime.strptime(minute, "%H:%M")
    hours = int(d.hour)
    minute = int(d.minute)
    print("Data to be sent to the prediction model ", data)
    print(type(data))

    prediction_input = [[
        station, main_temp, main_pressure, main_humidity, wind_speed, hours,
        minute
    ]]

    if date == "Monday":
        x = monday.predict(prediction_input)
    elif date == "Tuesday":
        x = tuesday.predict(prediction_input)
    elif date == "Wednesday":
        x = wednesday.predict(prediction_input)
    elif date == "Thursday":  # strftime("%A") yields "Thursday"
        x = thursday.predict(prediction_input)
    elif date == "Friday":
        x = friday.predict(prediction_input)
    elif date == "Saturday":
        x = saturday.predict(prediction_input)
    elif date == "Sunday":
        x = sunday.predict(prediction_input)

    print("Predicted available bikes for selected station is", int(x[0]))

    # Fetch the ML model output and return as JSON to client
    prediction = [int(x[0])]
    return json.dumps(prediction)
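The day-of-week chain above is easy to get wrong (a single misspelled branch never matches and leaves `x` unbound); a lookup-table sketch, assuming the seven per-day models (`monday` … `sunday`) are the same module-level estimators used above:

# Map strftime("%A") output to the per-day model.
DAY_MODELS = {
    "Monday": monday, "Tuesday": tuesday, "Wednesday": wednesday,
    "Thursday": thursday, "Friday": friday, "Saturday": saturday,
    "Sunday": sunday,
}
x = DAY_MODELS[date].predict(prediction_input)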
def cluster(d):
    df2, tweets = d

    # K-means clustering
    km = KMeans(n_clusters=8, n_init=100)  # try 100 different initial centroids
    km.fit(df2)

    cluster_stat = dict()

    # Tally cluster sizes and append each tweet to its cluster's file.
    for idx, cls in enumerate(km.labels_):
        # print(idx, cls)
        if cls in cluster_stat:
            cluster_stat[cls] += 1
        else:
            cluster_stat[cls] = 1
        with open('data/cluster/cluster-{0}.txt'.format(cls), 'a') as f:
            f.write(json.dumps(tweets[idx]) + '\r\n')
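The size tally can also be done in one line with `collections.Counter`:

from collections import Counter

cluster_stat = Counter(km.labels_)  # cluster label -> member count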
def update_job(self, email, job_id, status, result):
    if not self.capabilities()[SERVER_CAPABILITY_JOBS]:
        return
    collection = self.db.jobs
    doc = collection.find_one(dict(_id=ObjectId(job_id)))
    self.get_dataset(email, doc['dataset_id'])
    if doc.get('readonly', False):
        raise InvalidUsage('Not authorized', 403)
    if result is not None:
        if os.environ.get(CIRRO_JOB_RESULTS) is not None:  # save to directory
            result = save_job_result_to_file(result, job_id)
        else:
            result = ujson.dumps(result, double_precision=2, orient='values')
            result = str(self.get_gridfs().put(result, encoding='ascii'))
    collection.update_one(dict(_id=ObjectId(job_id)),
                          {'$set': dict(status=status, result=result)})
def view_report_detail_by_sort(request):
    str_start_date = '2020/03/10'
    start_date = datetime.datetime.strptime(str_start_date, '%Y/%m/%d').date()
    str_end_date = '2020/05/01'
    end_date = datetime.datetime.strptime(str_end_date, '%Y/%m/%d').date()

    # Get the sort type from the form
    sort_type = int(request.GET.get('name_sort_type', None))

    # Get the top countries
    filter_countries = logics.get_filter_countries(sort_type=sort_type,
                                                   report_date=end_date,
                                                   number_of_countries=5)

    # Get the report detail
    dict_report_detail = logics.get_report_detail(start_date, end_date, 7,
                                                  filter_countries)

    # JsonResponse serializes the dict itself; wrapping it in json.dumps()
    # would double-encode the payload as a JSON string.
    return JsonResponse(dict_report_detail, safe=False)
def close_restaurant(request):
    response = {}
    state = request.GET.get('state')
    companyId = request.GET.get('companyId')
    s = requests.session()
    information = {'state': state, 'companyId': companyId}
    a = s.put(
        'http://39.98.52.189:82/api/open/companyList',
        headers={'auth': 'gAAAAABcfjh_HP3zWYfdz_j1Cs15uECzHHNt3ujXJtV5C_'
                         'mbWuk-xEeshqunTYacVQVglTqa1pIhaESn3iuqLu9b6UFIhZ-wt4V6hCqJX3vLFQuc5cPP1_'
                         'qPiQcsl3fH6y-NDE3TqS4qKXZY9_gUkHl09eDMZeHgxw==',
                 'Content-Type': 'application/json'},
        data=json.dumps(information))
    j = json.loads(a.text)
    print(j['msg'])
    if j['msg'] == 'success':
        response = {"res": "ok"}
    else:
        response = {"res": "wrong"}
    return JsonResponse(response, safe=False)
def save_datasets_h5ad(datasets, schema, output_directory, filesystem,
                       whitelist):
    adata = None
    module_dataset = None
    for dataset in datasets:
        if dataset.uns.get(ADATA_MODULE_UNS_KEY) is not None:
            module_dataset = dataset.uns[ADATA_MODULE_UNS_KEY]
        else:
            adata = dataset
    adata.strings_to_categoricals()
    if module_dataset is not None:
        module_dataset.strings_to_categoricals()
        d = dict(X=module_dataset.X, var=module_dataset.var)
        adata.uns['module'] = d
    # with filesystem.open(os.path.join(output_directory, 'index.json.gz'),
    #                      'wt', compression='gzip') as out:
    #     out.write(ujson.dumps(schema, double_precision=2, orient='values'))
    pg_marker_keys = get_pegasus_marker_keys(adata)
    for key in list(adata.varm.keys()):
        if key not in pg_marker_keys:
            del adata.varm[key]
    sc_marker_keys = get_scanpy_marker_keys(adata)
    uns_whitelist = set(['modules', 'cirro-schema'])
    adata.uns['cirro-schema'] = ujson.dumps(schema, double_precision=2,
                                            orient='values')
    for key in list(adata.uns.keys()):
        if key in uns_whitelist:
            continue
        keep = False
        if key in sc_marker_keys:
            keep = True
        elif key.endswith('_colors'):
            field = key[0:len(key) - len('_colors')]
            if field in dataset.obs:
                keep = True
        if not keep:
            del adata.uns[key]
    adata.write(output_directory)
def build_dictionary(self, output_file):
    film_list = []
    for row in self.extended_dataframe.iterrows():
        element = row[1].to_dict()
        tropes = list(element.keys())[6:]
        filtered_tropes = [key for key in tropes if element[key] == 1]
        film = {
            'name': element['NameIMDB'],
            'rating': element['Rating'],
            'tropes': filtered_tropes
        }
        film_list.append(film)
        # if len(film_list) % 100 == 0:
        #     print(len(film_list))

    json_str = json.dumps(film_list) + "\n"
    json_bytes = json_str.encode('utf-8')
    with bz2.open(output_file, "wb") as f:
        f.write(json_bytes)
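Reading the archive back is just the inverse of the write path; a short sketch using the same `output_file`:

import bz2
import json

with bz2.open(output_file, "rb") as f:
    film_list = json.loads(f.read().decode('utf-8'))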
def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0,
                freeze_panes=None):
    # Write the frame cells using xlsxwriter.
    sheet_name = self._get_sheet_name(sheet_name)

    if sheet_name in self.sheets:
        wks = self.sheets[sheet_name]
    else:
        wks = self.book.add_worksheet(sheet_name)
        self.sheets[sheet_name] = wks

    style_dict = {'null': None}

    if _validate_freeze_panes(freeze_panes):
        wks.freeze_panes(*(freeze_panes))

    for cell in cells:
        val, fmt = self._value_with_fmt(cell.val)

        stylekey = json.dumps(cell.style)
        if fmt:
            stylekey += fmt

        if stylekey in style_dict:
            style = style_dict[stylekey]
        else:
            style = self.book.add_format(_XlsxStyler.convert(cell.style, fmt))
            style_dict[stylekey] = style

        if cell.mergestart is not None and cell.mergeend is not None:
            # Use the converted value so merged cells honor the format too.
            wks.merge_range(startrow + cell.row,
                            startcol + cell.col,
                            startrow + cell.mergestart,
                            startcol + cell.mergeend,
                            val, style)
        else:
            wks.write(startrow + cell.row, startcol + cell.col, val, style)
def applyMobile(request):
    if request.method == 'POST':
        mid = request.POST.get('mid')
        u_token = request.POST.get('u_token')
        user = User.objects.filter(u_token=u_token, identity=1, status=1)
        if user:
            userinfo = User.objects.get(u_token=u_token, identity=1, status=1)
            u_id = userinfo.id
            name = userinfo.name
            isHasNoApplied = Mobile.objects.filter(id=mid, applyUserid=0)
            if isHasNoApplied:
                try:
                    Mobile.objects.filter(id=mid).update(applyUserid=u_id,
                                                         applyUserName=name)
                    result = {"result": 'success',
                              "message": 'Application submitted'}
                except Exception:
                    result = {"result": 'false',
                              "message": 'Database operation failed'}
            else:
                result = {"result": 'false',
                          "message": 'Already being applied for, or on loan'}
        else:
            result = {"result": 'false', "message": 'Abnormal user status'}
        return HttpResponse(json.dumps(result),
                            content_type="application/json")
def updateinformation(request):
    flag = False
    flag1 = False
    if request.method == "POST":
        if request.user.is_authenticated():
            username = request.user.username
            if username == request.POST['email']:
                # Deleting the models from the signupModel, Django-provided model.
                # Deleting the Signup model automatically deletes the
                # WorkExperiences and Educations models.
                # print(request.user.username)
                logout(request)
                User.objects.filter(username=username).delete()
                print(request.user.username)
                # signupModel.objects.filter(email=username).delete()

                # Creating everything new from here on
                user1 = User.objects.create_user(
                    request.POST['email'], password=request.POST['password'])
                print(request.user.username)

                # Creating the user Signup model
                signUpModel = signupModel.objects.create(
                    user=user1,
                    name=request.POST['name'],
                    dateofbirth=request.POST['dob'],
                    gender=request.POST.get('gender', None),
                    email=request.POST['email'],
                    password=request.POST['password'],
                    skills=json.dumps(request.POST.getlist('skills[]')),
                    interests=request.POST.getlist('interests[]'),
                    objectivestatement=request.POST['objstat'],
                    country=request.POST['country'],
                    city=request.POST['City'])
                signUpModel.save()

                # Getting the lists from the input tags
                listofcompanies = request.POST.getlist('Company[]')
                listofpositions = request.POST.getlist('Position[]')
                listofstartdates = request.POST.getlist('startdates[]')
                listofenddates = request.POST.getlist('enddates[]')
                listofdegrees = request.POST.getlist('degreenames[]')
                listofinstitution = request.POST.getlist('institution[]')
                listofstartdate1 = request.POST.getlist('startdates1[]')
                listofenddate1 = request.POST.getlist('enddates1[]')

                ArrayContainingExperiencesObject = []
                # Creating objects for the users
                for i in range(0, len(listofcompanies)):
                    temp = workexperienceModel.objects.create(
                        id=None,
                        company=listofcompanies[i],
                        position=listofpositions[i],
                        startDate=listofstartdates[i],
                        endDate=listofenddates[i],
                        UserExperience=signUpModel)
                    ArrayContainingExperiencesObject.append(temp)
                # objects.create() already persists each row, so this
                # re-save is defensive and covers every object.
                for experience in ArrayContainingExperiencesObject:
                    experience.save()

                ArrayContainingEducationObject = []
                for i in range(0, len(listofdegrees)):
                    temp1 = Education.objects.create(
                        id=None,
                        degree=listofdegrees[i],
                        institution=listofinstitution[i],
                        startdateedu=listofstartdate1[i],
                        enddateedu=listofenddate1[i],
                        UserEducation=signUpModel)
                    ArrayContainingEducationObject.append(temp1)
                for education in ArrayContainingEducationObject:
                    education.save()

                login(request, user1)
                return render(request, 'jobs.html',
                              {'error': "Bio Updated Successfully"})
            else:
                flag = username_present(request.POST['email'])
                if flag:
                    username = request.user.username
                    UserRecord = models.signupModel.objects.filter(
                        email=username)
                    UserEducations = models.Education.objects.filter(
                        UserEducation=UserRecord)
                    UserExperiences = models.workexperienceModel.objects.filter(
                        UserExperience=UserRecord)
                    skills = UserRecord[0].skills.replace('"', '').replace(
                        '[', '').replace(']', '').split(",")
                    interests1 = UserRecord[0].interests.replace(
                        "'", "").replace(" ", "").replace("[", "").replace(
                        "]", "").split(",")
                    if UserRecord[0].gender == 'male':
                        flag = True
                    else:
                        flag1 = True
                    return render(request, 'EditProfile.html',
                                  {'UserRecord': UserRecord,
                                   'UserEducation': UserEducations,
                                   'UserExperience': UserExperiences,
                                   "flag": flag,
                                   "flag1": flag1,
                                   "Skills": skills,
                                   "Interests": interests1,
                                   "error": "Email already taken, please try "
                                            "a different email!"})
                else:
                    logout(request)
                    # Deleting the models from the signupModel, Django-provided
                    # model. Deleting the Signup model automatically deletes
                    # the WorkExperiences and Educations models.
                    User.objects.filter(username=username).delete()
                    # signupModel.objects.filter(email=username).delete()

                    # Creating everything new from here on
                    user1 = User.objects.create_user(
                        request.POST['email'],
                        password=request.POST['password'])

                    # Creating the user Signup model
                    signUpModel = signupModel.objects.create(
                        user=user1,
                        name=request.POST['name'],
                        dateofbirth=request.POST['dob'],
                        gender=request.POST.get('gender', None),
                        email=request.POST['email'],
                        password=request.POST['password'],
                        skills=json.dumps(request.POST.getlist('skills[]')),
                        interests=request.POST.getlist('interests[]'),
                        objectivestatement=request.POST['objstat'],
                        country=request.POST['country'],
                        city=request.POST['City'])
                    signUpModel.save()

                    # Getting the lists from the input tags
                    listofcompanies = request.POST.getlist('Company[]')
                    listofpositions = request.POST.getlist('Position[]')
                    listofstartdates = request.POST.getlist('startdates[]')
                    listofenddates = request.POST.getlist('enddates[]')
                    listofdegrees = request.POST.getlist('degreenames[]')
                    listofinstitution = request.POST.getlist('institution[]')
                    listofstartdate1 = request.POST.getlist('startdates1[]')
                    listofenddate1 = request.POST.getlist('enddates1[]')

                    ArrayContainingExperiencesObject = []
                    # Creating objects for the users
                    for i in range(0, len(listofcompanies)):
                        temp = workexperienceModel.objects.create(
                            id=None,
                            company=listofcompanies[i],
                            position=listofpositions[i],
                            startDate=listofstartdates[i],
                            endDate=listofenddates[i],
                            UserExperience=signUpModel)
                        ArrayContainingExperiencesObject.append(temp)
                    for experience in ArrayContainingExperiencesObject:
                        experience.save()

                    ArrayContainingEducationObject = []
                    for i in range(0, len(listofdegrees)):
                        temp1 = Education.objects.create(
                            id=None,
                            degree=listofdegrees[i],
                            institution=listofinstitution[i],
                            startdateedu=listofstartdate1[i],
                            enddateedu=listofenddate1[i],
                            UserEducation=signUpModel)
                        ArrayContainingEducationObject.append(temp1)
                    for education in ArrayContainingEducationObject:
                        education.save()

                    login(request, user1)
                    return render(request, 'jobs.html',
                                  {'error': "Bio Updated Successfully"})
    return render(request, 'EditProfile.html')
def signup(request):
    if request.method == "POST":
        try:
            User.objects.get(username=request.POST['username'])
            return render(request, 'Signupform.html',
                          {'error': 'Email has already been taken! '
                                    'Try another mail'})
        except User.DoesNotExist:
            # The data is also saved in MongoDB in order to generate
            # recommendations for the user who has just signed up.
            client = MongoClient('localhost:27017')
            db = client.ResumeDatabase

            # Inserting the ID of the relevant user
            no_of_documents = db.Person.count()
            ID = no_of_documents + 1
            db.Person.insert_one(
                {
                    "ID": ID,
                })

            # Building the profile-data variable for TF-IDF comparison.
            # Initially the profileData string is populated with the
            # position applied for.
            profileData = "not mentioned"
            db.Person.update(
                {"ID": ID},
                {"$set": {"Position Applied": "not mentioned"}}
            )

            # Populating the profile data with the user location, which is
            # the concatenation of the city and the country
            profileData += " " + request.POST['City'] + request.POST['Country']
            db.Person.update(
                {"ID": ID},
                {"$set": {"User Location": request.POST['City'] +
                                           request.POST['Country']}}
            )

            # Populating the profileData with the objective the user has
            # entered in the box
            profileData += " " + request.POST['textarea']
            db.Person.update(
                {"ID": ID},
                {"$set": {"Objective": request.POST['textarea']}}
            )

            listofcompanies1 = request.POST.getlist('Company[]')
            listofpositions1 = request.POST.getlist('Position[]')
            listofstartdates1 = request.POST.getlist('startdates[]')
            listofenddates1 = request.POST.getlist('enddates[]')
            listofDescription1 = request.POST.getlist('Descriptions[]')

            for i in range(0, len(listofcompanies1)):
                profileData += " " + listofcompanies1[i]
                profileData += " " + listofpositions1[i]
                profileData += " " + listofDescription1[i]
                db.Person.update(
                    {"ID": ID},
                    {"$push": {"Work Experience": {
                        "ExperienceID": i,
                        "Company": listofcompanies1[i],
                        "Title": listofpositions1[i],
                        "Dates": listofstartdates1[i] + ' to ' +
                                 listofenddates1[i],
                        "Description": listofDescription1[i]
                    }}}
                )

            listofdegrees1 = request.POST.getlist('degreenames[]')
            listofinstitution1 = request.POST.getlist('institution[]')
            listofstartdates2 = request.POST.getlist('startdates1[]')
            listofenddates2 = request.POST.getlist('enddates1[]')

            for i in range(0, len(listofdegrees1)):
                profileData += " " + listofinstitution1[i]
                profileData += " " + listofdegrees1[i]
                db.Person.update(
                    {"ID": ID},
                    {"$push": {"Education": {
                        "EducationID": i,
                        "School": listofinstitution1[i],
                        "Title": listofdegrees1[i],
                        "Dates": listofstartdates2[i] + ' to ' +
                                 listofenddates2[i],
                    }}}
                )

            skills_coming = request.POST.getlist('skills[]')
            interests_coming = request.POST.getlist('interests[]')

            for i in range(0, len(skills_coming)):
                profileData += " " + skills_coming[i]
                db.Person.update(
                    {"ID": ID},
                    {"$push": {"Skills": {
                        "Skill": skills_coming[i],
                    }}}
                )

            temp1 = ""
            for i in range(0, len(interests_coming)):
                if i == len(interests_coming) - 1:
                    temp1 = temp1 + interests_coming[i]
                else:
                    temp1 = temp1 + interests_coming[i] + ','
            profileData += " " + temp1
            db.Person.update(
                {"ID": ID},
                {"$set": {"Additional Information": temp1}}
            )
            db.Person.update(
                {"ID": ID},
                {"$set": {"Profile Data": profileData}}
            )

            user1 = User.objects.create_user(
                request.POST['username'], password=request.POST['password'])

            # Creating the user Signup model
            signUpModel = signupModel.objects.create(
                user=user1,
                name=request.POST['name'],
                dateofbirth=request.POST['dob'],
                gender=request.POST.get('gender', None),
                email=request.POST['username'],
                password=request.POST['password'],
                skills=json.dumps(request.POST.getlist('skills[]')),
                interests=request.POST.getlist('interests[]'),
                objectivestatement=request.POST['textarea'],
                country=request.POST['Country'],
                city=request.POST['City'],
                idformongo=ID)
            signUpModel.save()

            # Getting the lists from the input tags
            listofcompanies = request.POST.getlist('Company[]')
            listofpositions = request.POST.getlist('Position[]')
            listofstartdates = request.POST.getlist('startdates[]')
            listofenddates = request.POST.getlist('enddates[]')
            listofdegrees = request.POST.getlist('degreenames[]')
            listofinstitution = request.POST.getlist('institution[]')
            listofstartdate1 = request.POST.getlist('startdates1[]')
            listofenddate1 = request.POST.getlist('enddates1[]')

            ArrayContainingExperiencesObject = []
            # Creating objects for the users
            for i in range(0, len(listofcompanies)):
                temp = workexperienceModel.objects.create(
                    id=None,
                    company=listofcompanies[i],
                    position=listofpositions[i],
                    startDate=listofstartdates[i],
                    endDate=listofenddates[i],
                    UserExperience=signUpModel)
                ArrayContainingExperiencesObject.append(temp)
            # objects.create() already persists each row, so this re-save
            # is defensive and covers every object.
            for experience in ArrayContainingExperiencesObject:
                experience.save()

            ArrayContainingEducationObject = []
            for i in range(0, len(listofdegrees)):
                temp1 = Education.objects.create(
                    id=None,
                    degree=listofdegrees[i],
                    institution=listofinstitution[i],
                    startdateedu=listofstartdate1[i],
                    enddateedu=listofenddate1[i],
                    UserEducation=signUpModel)
                ArrayContainingEducationObject.append(temp1)
            for education in ArrayContainingEducationObject:
                education.save()

            # Setting the session
            login(request, user1)
            UserRecord = models.signupModel.objects.filter(
                email=request.POST['username'])
            return render(request, 'jobs.html')
            # return render(request, 'MainPage.html', {'UserRecord': UserRecord})
    else:
        return render(request, 'Signupform.html')
def process_item(self, item, spider):
    lines = json.dumps(dict(item), ensure_ascii=False) + '\n'
    self.file.write(lines)
    return item
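`self.file` must be opened and closed elsewhere in the pipeline; a minimal sketch of the surrounding Scrapy pipeline class, following the standard open_spider/close_spider pattern (the `items.jl` filename is an assumption):

import json

class JsonWriterPipeline:
    def open_spider(self, spider):
        # One output file per crawl; the filename is assumed.
        self.file = open('items.jl', 'w', encoding='utf-8')

    def close_spider(self, spider):
        self.file.close()

    def process_item(self, item, spider):
        lines = json.dumps(dict(item), ensure_ascii=False) + '\n'
        self.file.write(lines)
        return item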
def detect_img(yolo):
    result_json = []
    not_cont = 0
    with open("result_11_25_2.txt", "w") as f:
        k = 0
        # for i in range(50803):
        for file in os.listdir(test_path):
            # path = 'F:/比赛事宜/裂纹识别/复赛数据/challengedataset-semifinal/test/test/{}.jpg'.format(i + 1)
            path = os.path.join(test_path, file)
            img = Image.open(path)
            print(path)
            img, boxes, scores, classes = yolo.detect_image(img)
            i = 0
            for j in classes:
                if j > 20 or j < 0:
                    print(".......j", j)
                    j = 0
                    return
                temp_result = dict(
                    name=file,
                    # Cast numpy scalars so json.dumps can serialize them.
                    category=int(j),
                    bbox=["%.2f" % x for x in boxes[i]],
                    score=float(scores[i]),
                )
                i += 1
                print(json.dumps(temp_result))
                result_json.append(temp_result)
            if i > 0:
                not_cont += 1
            # print(boxes)
            # print("Category:", classes)
            # print(file)
            save_path = os.path.join(image_path, file)
            save_path_2 = os.path.join(image_not_path, file)
            if len(boxes) > 0:
                if len(boxes) == 1:
                    w = boxes[0][3] - boxes[0][1]
                    h = boxes[0][2] - boxes[0][0]
                    ratio = w / h
                    print(ratio)
                    if boxes[0][0] < 200:  # This step suppresses detected circular tubes
                        img.save(save_path)
                        f.write("{} {}\n".format(file, 0))
                        k = k + 1
                    # elif (w * h < 10000):  # This step suppresses smaller detection boxes
                    #     img.save('F:/image/test_result_11_25/{}.jpg'.format(i + 1))
                    #     f.write("{}.jpg {}\n".format(i + 1, 0))
                    #     k = k + 1
                    else:
                        img.save(save_path_2)
                        f.write("{} {}\n".format(file, 1))
                        k = k + 1
                else:
                    img.save(save_path_2)
                    f.write("{} {}\n".format(file, 1))
                    k = k + 1
            else:
                img.save(save_path)
                f.write("{} {}\n".format(file, 0))
                k = k + 1
    print(k)
    yolo.close_session()
    res = json.dumps(result_json)
    with open("result.json", "w") as f:
        f.write(res)
    print("save over")
    print(not_cont)
def test_array_numpy_except(self, bad_input, exc_type, err_msg, kwargs):
    with pytest.raises(exc_type, match=err_msg):
        ujson.decode(ujson.dumps(bad_input), numpy=True, **kwargs)
def test_array_numpy_except(self, bad_input, exc_type, kwargs):
    with pytest.raises(exc_type):
        ujson.decode(ujson.dumps(bad_input), numpy=True, **kwargs)
def get_furnaces(self):
    """
    :return: Json Response
    """
    try:
        assert self._db_connection, {
            STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
            MESSAGE_KEY: DB_ERROR
        }
        if self.query_params:
            equipment = self.query_params[EQUIPMENT].split(",")
            """
            This condition selects a single equipment or multiple equipment,
            based on the user's selection.
            """
            if len(equipment) == 1:
                equipment_param = '(' + str(equipment[0]) + ')'
            else:
                equipment_param = tuple(equipment)
            perform_list_all = []
            try:
                if self.query_params[IS_ACTIVE] == "true" and \
                        int(equipment[0]) < 15:
                    self._psql_session.execute(
                        MULTIPLE_CONFIG_EQUIPMENT.format(
                            self.query_params[IS_ACTIVE], equipment_param,
                            self.query_params[FEED_NAME]))
                elif int(equipment[0]) > 14:
                    self._psql_session.execute(
                        NON_FURNACE_EXTERNAL_TARGETS.format(equipment_param))
                else:
                    pass
                df = pd.DataFrame(self._psql_session.fetchall())
                dt = df.groupby('equipment_tag_name').apply(
                    lambda x: x.to_json(orient='records'))
                df.sort_values('parameter', ascending=True, inplace=True)
                obj = {}
                for each_data in dt:
                    for each in json.loads(each_data):
                        obj[each['equipment_tag_name']] = {
                            'external_targets': json.loads(each_data),
                            'performance_tags': None
                        }
                try:
                    self._psql_session.execute(
                        MULTIPLE_CONFIG_CASE_NAME_PERFORMACE_TAGS.format(
                            equipment_param))
                except Exception as e:
                    log_error('Exception due to get_furnaces Function: %s' +
                              str(e))
                performance_list = json.loads(
                    json.dumps(self._psql_session.fetchall()))
                perf_list = json.loads(json.dumps(performance_list))
                try:
                    self._psql_session.execute(
                        ALL_PERF_TAGS_FOR_NON_FURNACES.format(equipment_param))
                except Exception as e:
                    log_error('Exception due to get_furnaces Function: %s' +
                              str(e))
                perameter_list = json.loads(
                    json.dumps(self._psql_session.fetchall()))
                perform_list = json.loads(json.dumps(perameter_list))
                if len(perform_list) > 0:
                    for each_perform in perform_list:
                        perform_list_all.append(each_perform['result'])
                else:
                    pass
                for each_data in perf_list:
                    try:
                        obj[each_data["equipment_tag_name"]][
                            "performance_tags"] = each_data['case_name']
                    except Exception as err:
                        pass
                for each_data in perform_list_all:
                    try:
                        obj[each_data["equipment_tag_name"]][
                            "performance_tags_list"] = each_data['parameter']
                    except Exception as err:
                        pass
                return JsonResponse(obj, safe=False, status=200)
            except Exception as e:
                log_error('Exception due to get_furnaces Function: %s' + str(e))
                return JsonResponse({"message": str(e)}, safe=False)
    except AssertionError as e:
        log_error('Exception due to get_furnaces Function: %s' + str(e))
        return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                            status=e.args[0][STATUS_KEY])
    except Exception as e:
        log_error(traceback.format_exc())
        return JsonResponse(
            {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
            status=HTTP_500_INTERNAL_SERVER_ERROR)
def update_equip(self, body, equipment, feed_name, is_active):
    """
    This function will update external targets
    """
    try:
        assert self._db_connection, {
            STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
            MESSAGE_KEY: DB_ERROR
        }
        conn = pg_connection()
        if conn:
            cursor = conn.cursor()
            try_now = list(body.keys())
            count = 0
            counter = 0
            performance_case_name = body[try_now[count]]['performance_tags']
            if is_active == 'true' and int(equipment[0]) < 15:
                try:
                    try:
                        for each_equipment in equipment:
                            try:
                                for each in json.loads(json.dumps(
                                        body[try_now[counter]]
                                        ['external_targets'])):
                                    if float(each["min"]) <= \
                                            float(each['target']):
                                        update_external_lbt = \
                                            SET_UPDATE_EXTERNAL_TAGS_LBT.format(
                                                ('Between' + ' ' + each["min"] +
                                                 ' and ' + each["max"]),
                                                each['target'],
                                                each['is_active'], is_active,
                                                each_equipment, feed_name,
                                                each['parameter'])
                                        cursor.execute(update_external_lbt)
                                    else:
                                        pass
                                counter += 1
                            except Exception as e:
                                pass
                        try:
                            conn.commit()
                        except Exception as commit_err:
                            log_error(commit_err)
                    except Exception as e:
                        log_error("The Exception is" + str(e))
                    if int(equipment[0]) < 15:
                        for each_equipment in equipment:
                            update_perf_lbt = update_perf(
                                body[try_now[count]]['performance_tags'],
                                each_equipment, is_active)
                            cursor.execute(update_perf_lbt)
                            count += 1
                    else:
                        pass
                except Exception as err:
                    pass
            elif int(equipment[0]) > 14:
                try:
                    for each in json.loads(json.dumps(
                            body[try_now[count]]['external_targets'])):
                        if float(each["min"]) <= float(each['target']):
                            update_external_lbt = \
                                UPDATED_QUERY_FOR_EXTERANL_TARGETS_NON_FURNACE.format(
                                    ('Between' + ' ' + each["min"] + ' and ' +
                                     each["max"]),
                                    each['target'], each['is_active'],
                                    equipment[0], each['parameter'])
                            cursor.execute(update_external_lbt)
                        else:
                            return JsonResponse({MESSAGE_KEY: SET_LIMIT},
                                                status=404)
                    if int(equipment[0]) > 14:
                        update_perf_lbt = update_perf(
                            body[try_now[0]]['performance_tags'],
                            equipment[0], is_active)
                        cursor.execute(update_perf_lbt)
                    else:
                        pass
                    case_equip = UPDATE_CASE_EQUIPMENT_MAPPING.format(
                        equipment[0])
                    cursor.execute(case_equip)
                    insert_case = INSERT_CASE_EQUIP_MAPPING.format(
                        equipment[0], performance_case_name)
                    cursor.execute(insert_case)
                except Exception as err:
                    log_error("The Exception is" + str(err))
            else:
                print('The Function Done')
            try:
                conn.commit()
            except Exception as commit_err:
                log_error(commit_err)
        if conn:
            cursor.close()
            conn.close()
        return 0
    except AssertionError as e:
        log_error('Exception due to update_equip Function: %s' + str(e))
        return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                            status=e.args[0][STATUS_KEY])
    except Exception as e:
        log_error(traceback.format_exc())
        return JsonResponse(
            {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
            status=HTTP_500_INTERNAL_SERVER_ERROR)
    match = 0
    score = 0
    for i in range(len(label_set)):
        # print(gold_standard_labels.values)
        if label_set[i].lower() in gold_standard_labels.values:
            rank = np.where(
                gold_standard_labels.values == label_set[i].lower())[1][0]
            rank_list.append(rank)
            match += 1
            score += rank / 5
        else:
            score += 1.2
    return match, rank_list, score / k


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='evaluate')
    parser.add_argument('--experiment_directory', required=True)
    parser.add_argument('--gold_standard', required=True)
    parser.add_argument('--label_set', required=True)
    parser.add_argument('--k', required=True, type=int)
    args = parser.parse_args()

    num_exact_matchs, rank_list, label_score = evaluate_labels(
        args.gold_standard, args.label_set, args.k)
    print(str(json.dumps(num_exact_matchs)))
    # print(str(json.dumps(rank_list)))
    print(str(label_score))
def to_json(data, orient='values'):
    return ujson.dumps(data, double_precision=2, orient=orient)
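With the pandas-bundled ujson used elsewhere in this code, `orient='values'` drops index and column labels and emits DataFrame rows as nested arrays; a quick usage sketch (output shown approximately):

import pandas as pd
import pandas._libs.json as ujson

df = pd.DataFrame({'x': [1.234567, 2.0], 'y': [3.5, 4.25]})
print(ujson.dumps(df, double_precision=2, orient='values'))
# roughly: [[1.23,3.5],[2.0,4.25]]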
def __bytes__(self):
    return json.dumps(self.__dict__).encode()
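Defining `__bytes__` this way means `bytes(obj)` yields the instance's attribute dict as UTF-8 JSON; a minimal round-trip sketch with a hypothetical class:

import json

class Message:
    def __init__(self, kind, payload):
        self.kind = kind
        self.payload = payload

    def __bytes__(self):
        return json.dumps(self.__dict__).encode()

m = Message('greeting', 'hello')
assert json.loads(bytes(m)) == {'kind': 'greeting', 'payload': 'hello'}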
ls.main(args.experiment_directory, article_labels_orig, args.percentile,
        args.label_score,
        args.experiment_directory + '/original_country_labels.csv',
        args.num_candidates)

# Joint Clustering
tf_idf_score = pd.read_csv(args.experiment_directory + args.tf_idf_score_file)
joint_alg_groups, joint_distance_list, joint_average_distance = \
    km.fit_joint_all(X, args.article_keywords,
                     args.experiment_directory + '/top_labels.csv',
                     ids, args.k, args.weight, xy_embeddings, tf_idf_score)
joint_alg_groups = pd.DataFrame(joint_alg_groups)
joint_alg_groups = ids.join(joint_alg_groups)
joint_alg_groups.columns = ['article_id', 'country']
joint_alg_groups['distance'] = joint_distance_list
joint_alg_groups.to_csv('%s/new_cluster_groups.csv' %
                        (args.experiment_directory,), index=False)
joint_alg_groups.to_csv('%s/score_cluster_groups.csv' %
                        (args.experiment_directory,), index=False)

# Joint Labeling
article_labels_new = pd.merge(article_labels, joint_alg_groups,
                              on='article_id')
article_labels_new = pd.merge(article_labels_new, label_names, on='label_id')
ls.main(args.experiment_directory, article_labels_new, args.percentile,
        args.label_score,
        args.experiment_directory + '/new_country_labels.csv',
        args.num_candidates)

# Get labels based on label scores instead of running tf-idf again
score_based_labels = get_final_labels(args.article_keywords, joint_alg_groups,
                                      args.experiment_directory +
                                      '/top_labels.csv',
                                      args.k, tf_idf_score)
score_based_labels.to_csv(args.experiment_directory +
                          "/score_country_labels.csv", index=True)

print(str(json.dumps(orig_average_distance)))
print(str(json.dumps(joint_average_distance)))