def read_clusters(CLUSTERS_FILENAME):
    """Load centroids from the clusters cache file.

    Each line of the cache is a literal-encoded sequence whose first element
    is the centroid id and second is its coordinate vector; append the
    (centroid_id, coords) pairs to the module-level ``clusters`` list.
    """
    cluster_data = read_from_clusters_cache_file(CLUSTERS_FILENAME)
    for line in cluster_data.strip().split("\n"):
        # Parse once — the original called literal_eval twice per line.
        parsed = le(line)
        centroid_id, coords = parsed[0], parsed[1]
        clusters.append((centroid_id, coords))
def mcr_generator(request):
    """Django view for the MCR generator.

    GET (or an invalid POST) renders the form page; a valid POST feeds the
    cleaned weights and options to ``tg.generate_mcr`` and renders the result.
    """
    if request.method == 'POST':
        form = mcrForm(request.POST)
        if form.is_valid():
            cleaned = form.cleaned_data
            # Weights are stored as literal-encoded lists in the form fields.
            content = tg.generate_mcr(
                np.array(le(cleaned["ck_weight"])),
                np.array(le(cleaned["cntr_weight"])),
                cleaned["ck_dir"],
                cleaned["ck_lv"],
                cleaned["blind"])
            return render(request, 'contentout.html', {'content': content})
        # Invalid POST: fall through and re-render with the bound form.
    else:
        form = mcrForm(initial={
            "ck_weight": WEIGHTS_DEF["Check"].tolist(),
            "cntr_weight": WEIGHTS_DEF["Counter"].tolist(),
        })
    return render(request, 'mcrcontent.html', {'form': form})
def read_clusters(CLUSTERS_FILENAME):
    """Load centroids from the clusters cache file (delta-accumulator variant).

    Appends (centroid_id, coords) to the global ``clusters`` list, skipping
    entries whose coordinate vector is not ``NUM``-dimensional, and
    initialises each centroid's (sum, count) accumulator in the global
    ``delta_clusters`` dict.
    """
    cluster_data = read_from_clusters_cache_file(CLUSTERS_FILENAME)
    for line in cluster_data.strip().split("\n"):
        # Parse once — the original called literal_eval twice per line.
        parsed = le(line)
        centroid_id, coords = parsed[0], parsed[1]
        if len(coords) != NUM:
            continue  # skip malformed centroids
        clusters.append((centroid_id, coords))
        delta_clusters[centroid_id] = ([0] * NUM, 0)
def mapper():
    # Hadoop-streaming k-means mapper (Python 2): for each stdin record,
    # emit "<nearest cluster id>\t<key>_<distance>".
    while True:
        line = sys.stdin.readline()
        if not line:
            break
        ws = line.strip('\n').split('_')
        key = ws[0]
        # Feature vector = concatenation of the two literal-encoded fields.
        value = le(ws[1]) + le(ws[2])
        #value = le(ws[1])[:NUM]
        nearest_cluster_id, nearest_cluster_dist = get_nearest_cluster(value)
        # Tab-separated output for the shuffle phase (Python 2 print statement).
        print str(nearest_cluster_id) + "\t" + str(key) + "_" + str(nearest_cluster_dist)
def read_txt():
    """Read 'text.txt' and load the income/expense category data into the
    module globals ``kategory_dohod`` and ``kategory_rashod``.

    Returns None (leaving the globals untouched) when the file is missing or
    does not contain the '#&*#' separator.
    """
    global kategory_rashod, kategory_dohod
    try:
        # `with` guarantees the handle is closed even on error (the original
        # leaked it), and the bare `except:` is narrowed to the errors this
        # block can actually raise: OSError from open/read, ValueError from
        # the two-way unpack when the separator is absent.
        with open('text.txt', 'r') as txt:
            kategory_dohod_sinh, kategory_rashod_sinh = txt.read().split('#&*#')
        print('Read_txt(): ' + kategory_dohod_sinh, kategory_rashod_sinh)
    except (OSError, ValueError):
        return None
    kategory_dohod = le(kategory_dohod_sinh)
    kategory_rashod = le(kategory_rashod_sinh)
def break_my_team(request):
    # Django view (Python 2 — uses xrange) for the "break my team" form.
    # The form has a variable number of pkmn fields; brmtForm(pc) builds a
    # form class with pc such fields.
    init_def = {
        "ck_weight": WEIGHTS_DEF["Check"].tolist(),
        "cntr_weight": WEIGHTS_DEF["Counter"].tolist()
    }
    global aps_list_full
    if request.method == 'POST':
        # Count how many pkmn fields were submitted.
        pc = 0
        while "pkmn%d" % pc in request.POST:
            pc += 1
        if "calculate" in request.POST:
            form = brmtForm(pc)(request.POST)
            if form.is_valid():
                # Map each chosen name to its index and run the analysis.
                res = render(
                    request, 'contentout.html', {
                        'content': tg.brmt(
                            np.array([
                                aps_list_full.index(
                                    form.cleaned_data['pkmn%d' % i])
                                for i in xrange(pc)
                            ]),
                            np.array(le(form.cleaned_data["ck_weight"])),
                            np.array(le(form.cleaned_data["cntr_weight"])),
                            form.cleaned_data["ck_dir"],
                            form.cleaned_data["blind"])
                    })
                return res
            # Invalid form: fall through and re-render with errors.
        else:
            # Non-calculate POST (e.g. "add field"): rebuild the form,
            # carrying over whatever the user already typed.
            form = brmtForm(pc)(request.POST)
            reqdata = dict(init_def)
            for i in xrange(pc):
                cont = 'pkmn%d' % i
                if cont in form.data:
                    reqdata.update({cont: form.data[cont]})
            if "add_pkmn_field" in request.POST:
                pc += 1
            form = brmtForm(pc, False)(initial=reqdata)
    else:
        # First visit: single pkmn field, default weights.
        form = brmtForm(1, False)(initial=init_def)
    return render(request, 'brmtcontent.html', {'form': form})
def plot_results(filename, plot_dir):
    """Plot testing accuracy as grouped 3D bars over the two noise settings.

    NOTE: relies on Python 2 semantics (``dict.keys()[i]`` indexing below).
    """
    # CSV keys look like "<gaussianSNR>_<uniformSNR>"; values are
    # literal-encoded accuracy structures.
    raw_stat = OrderedDict()
    with open(filename, 'r') as f:
        dic = OrderedDict(csv.reader(f))
        for key, value in dic.items():
            raw_stat[tuple(map(int, key.split('_')))] = le(value)
    processed_stat = row2col(raw_stat)
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    c = ['r', 'g', 'b', 'y', 'm', 'c']  # one colour per accuracy series
    for qSNR, acc in processed_stat.items():
        for i in range(len(acc)):
            # Offset each series along x so the bars sit side by side.
            xs = np.array(acc[acc.keys()[i]].keys()) + (i - len(acc) / 2) * 0.4
            ys = acc[acc.keys()[i]].values()
            ax.bar(xs, ys, width=0.4, zs=qSNR, zdir='y', color=c[i],
                   align='center', label=acc.keys())
            # Annotate every bar with its value.
            for x, y in zip(xs, ys):
                ax.text(x, qSNR, y, '%2.2f' % y, ha='left', va='bottom')
    ax.set_xlabel('Gaussian Noise SNR (dB)')
    ax.set_ylabel('Uniform Noise SNR (dB)')
    ax.set_zlabel('Testing Accuracy')
    #plt.legend()
    plt.show()
    fig.savefig(plot_dir)
def index():
    """Chalice endpoint: verify the literal-encoded request body against the
    caller's source IP and report the result."""
    req = app.current_request
    payload = le(req.raw_body)
    source_ip = req.to_dict().get("context", {}).get("identity", {}).get("sourceIp")
    return {'verified': verified(payload, source_ip)}
def mapper():
    # k-means mapper, combiner-style (Python 2): accumulate per-cluster
    # coordinate sums and counts locally, then emit one partial sum per
    # cluster at end of input.
    while True:
        line = sys.stdin.readline()
        if not line:
            break
        ws = line.strip('\n').split('_')
        key = ws[0]
        # Feature vector = concatenation of the two literal-encoded fields.
        value = le(ws[1]) + le(ws[2])
        if len(value) != NUM:
            continue  # skip malformed records
        nearest_cluster_id = get_nearest_cluster(value)
        accsum, cont = delta_clusters[nearest_cluster_id]
        delta_clusters[nearest_cluster_id] = (vecplus(accsum, value), cont + 1)
    # Emit "cluster_id\taccsum;count" for the reducer (Python 2 print).
    for key in delta_clusters:
        accsum, cont = delta_clusters[key]
        print str(key) + "\t" + str(accsum) + ";" + str(cont)
def reducer():
    # k-means reducer: merge the mappers' per-cluster partial sums (input is
    # sorted by cluster id) and emit the new centroid for each cluster.
    oldKey = None
    accsum_total = list([0] * NUM)
    count_total = 0
    while True:
        line = sys.stdin.readline()
        if not line:
            break
        ws = line.strip().split('\t')
        if len(ws) != 2:
            continue  # skip malformed lines
        cluster_id, totals = ws
        accsum, count = totals.split(";")
        accsum = le(accsum)  # literal-encoded coordinate sum
        if oldKey and oldKey != cluster_id:
            # Key boundary: flush the finished cluster, reset accumulators.
            emit_new_lat_long(oldKey, accsum_total, count_total)
            accsum_total = list([0] * NUM)
            count_total = 0
        oldKey = cluster_id
        accsum_total = vecplus(accsum_total, accsum)
        count_total += float(count)
        # print oldKey, accsum_total, count_total
    # Flush the final cluster (if any input was seen at all).
    if oldKey != None:
        emit_new_lat_long(oldKey, accsum_total, count_total)
def readMessage(login, id):
    """Fetch message ``id`` for mailbox ``login`` ('user@domain') from the
    1secmail API and return its text body (or None if absent)."""
    import json  # local import: the module may not import json at top level
    tmp = login.split('@')
    response = scraper.get(
        'https://www.1secmail.com/api/v1/?action=readMessage&login=%s&domain=%s&id'
        '=%s' % (tmp[0], tmp[1], id)).text
    # The API returns JSON; json.loads handles true/false/null, which
    # ast.literal_eval (the original parser) would reject.
    result = json.loads(response)
    return result.get('textBody')
def plot_results(filename, plot_dir):
    """Plot testing accuracy as grouped 3D bars over the two noise settings.

    NOTE: relies on Python 2 semantics (``dict.keys()[i]`` indexing below).
    """
    # CSV keys look like "<gaussianSNR>_<uniformSNR>"; values are
    # literal-encoded accuracy structures.
    raw_stat = OrderedDict()
    with open(filename, 'r') as f:
        dic = OrderedDict(csv.reader(f))
        for key, value in dic.items():
            raw_stat[tuple(map(int, key.split('_')))] = le(value)
    processed_stat = row2col(raw_stat)
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    c = ['r','g','b','y','m','c']  # one colour per accuracy series
    for qSNR, acc in processed_stat.items():
        for i in range(len(acc)):
            # Offset each series along x so the bars sit side by side.
            xs = np.array(acc[acc.keys()[i]].keys())+(i-len(acc)/2)*0.4
            ys = acc[acc.keys()[i]].values()
            ax.bar(xs, ys, width=0.4, zs=qSNR, zdir='y', color=c[i],
                   align='center', label=acc.keys())
            # Annotate every bar with its value.
            for x, y in zip(xs, ys):
                ax.text(x, qSNR, y, '%2.2f'%y, ha='left', va='bottom')
    ax.set_xlabel('Gaussian Noise SNR (dB)')
    ax.set_ylabel('Uniform Noise SNR (dB)')
    ax.set_zlabel('Testing Accuracy')
    #plt.legend()
    plt.show()
    fig.savefig(plot_dir)
def team_generator(request):
    # Django view for the team generator (Python 2 — the bare tuple after
    # `for i in` inside the comprehension below is a SyntaxError on Python 3).
    if request.method == 'POST':
        form = genForm(request.POST)
        if form.is_valid():
            return render(
                request, 'contentout.html', {
                    'content': tg.team_generator(
                        np.array(le(form.cleaned_data["weights"])),
                        form.cleaned_data["ck_dir"],
                        # When "bl_rain" is checked, blacklist the rain-team
                        # staples by their index in aps_list_full.
                        np.array([
                            aps_list_full.index(i)
                            for i in 'Kingdra|Swift Swim', 'Kabutops|SD Swift Swim',
                            'Omastar|SS Swift Swim', 'Swampert-Mega|Swift Swim'
                            if form.cleaned_data["bl_rain"]
                        ]))
                })
    else:
        form = genForm(initial={"bl_rain": True})
    return render(request, 'gencontent.html', {'form': form})
def getData(pidList):
    # Scrape product data for each Newegg product id via the OWS JSON API and
    # return it as a DataFrame (Python 2 — print statements below).
    apiurl = 'http://www.ows.newegg.com/Products.egg'
    OutData = []
    for pid in pidList:
        sleep(1)  # throttle requests
        try:
            r = requests.get('%s/%s' % (apiurl, pid)).content
            js = loads(r)
            g = {}
            g['Title'] = js['Title']
            final_price = js['FinalPrice'].replace(',', '')
            if final_price.count('Checkout') == 1:
                # Price hidden until checkout — no usable number.
                g['FinalPrice'] = float('NaN')
            elif final_price == 'See price in cart':
                g['FinalPrice'] = float(js['MappingFinalPrice'].replace(
                    ',', '').replace('$', ''))
            else:
                g['FinalPrice'] = float(final_price.replace('$', ''))
            g['OriginalPrice'] = float(js['OriginalPrice'].replace(
                ',', '').replace('$', ''))
            g['Instock'] = BoolToInt(js['Instock'])
            g['Rating'] = js['ReviewSummary']['Rating']
            try:
                # TotalReviews is literal-encoded; default to 0 when absent
                # or unparsable.
                g['TotalReviews'] = le(js['ReviewSummary']['TotalReviews'])[0]
            except:
                g['TotalReviews'] = 0
            g['IsHot'] = BoolToInt(js['IsHot'])
            ShippingPrice = js['ShippingInfo']['NormalShippingText'].split(
                ' ')[0]
            if ShippingPrice.count('Free') == 1:
                g['ShippingPrice'] = 0.0
            elif ShippingPrice.count('Special') == 1:
                g['ShippingPrice'] = 2.99  # "Special shipping => $2.99 Egg Saver Shipping"
            else:
                g['ShippingPrice'] = float(ShippingPrice.replace('$', ''))
            g['IsShipByNewegg'] = BoolToInt(js['IsShipByNewegg'])
            if len(js['PromotionText']) > 0:
                g['Promotion'] = js['PromotionText']
            else:
                g['Promotion'] = 'NaN'
            MIR = js['MailInRebateInfo']
            if MIR is None:
                g['MailInRebateInfo'] = 'NaN'
            else:
                g['MailInRebateInfo'] = js['MailInRebateInfo'][0]
            g['PID'] = pid
            g['Brand'] = js['CoremetricsInfo']['Brand']
            g['Date'] = dtn  # snapshot timestamp from module scope
            OutData.append(g)
        except:
            # Best-effort scrape: log and skip products that fail to parse.
            print 'FAILED: %s' % pid
            pass
    dframe = DataFrame(OutData)
    dframe[
        'FinalPriceShipped'] = dframe['FinalPrice'] + dframe['ShippingPrice']
    return dframe
def readConfig():
    """Parse CLI arguments and the .conf file into a nested options dict,
    then initialise logging.  Exits the process on configuration errors."""
    #Note: using print here instead of logging because logger isn't initialized
    try:
        fpath = None
        # A single CLI arg is either a config file (*.conf) or a source path.
        if sys.argv[1][-4:] == "conf":
            configFile = sys.argv[1]
            print("Using config file: " + configFile)
        else:
            fpath = sys.argv[1]
            configFile = "default.conf"
            print("Using source path: " + fpath)
    except Exception:
        # No CLI argument given (IndexError) — fall back to defaults.
        print("No file paths provded, using defaults ('default.conf', './*.csv')")
        fpath = None
        configFile = "default.conf"
    try:
        cp = configparser.RawConfigParser()
        cp.optionxform = str  #changes the default parser to be case-sensitive
        cp.read(configFile)
    except Exception as e:
        print("Error loading config file: " + str(e))
        exit(e)
    opts = {}
    try:
        for s in cp.sections():
            opts[s] = {}
            for o in cp[s]:
                try:
                    # Prefer the literal-evaluated value (ints, lists, ...);
                    # fall back to the raw string when it doesn't parse.
                    opts[s][o] = le(cp[s][o])
                except Exception:
                    opts[s][o] = cp[s][o]
        # A CLI source path overrides the config file's fpath.
        if fpath is not None:
            opts["Input"]["fpath"] = fpath
        #opts["Input"]["fpath"] = opts["Input"]["fpath"] #.replace("\\","\\\\")
        # Resolve sourceCharsFile relative to the input path if needed.
        if not os.path.isabs(opts["Input"]["sourceCharsFile"]):
            opts["Input"]["sourceCharsFile"] = os.path.join(
                opts["Input"]["fpath"], opts["Input"]["sourceCharsFile"])
        if opts["Output"]["outputPath"] == "":
            opts["Output"]["outputPath"] = opts["Input"]["fpath"]
        # Map a level name to the logging module's numeric constant.
        if opts["Output"]["consoleLogLevel"] in ("NOTSET", "DEBUG", "INFO",
                                                 "WARNING", "ERROR", "CRITICAL"):
            opts["Output"]["consoleLogLevel"] = eval(
                "logging." + opts["Output"]["consoleLogLevel"])
    except Exception as e:
        print("Error processing " + configFile + ": " + str(e))
        exit(e)
    setupLogging(opts["Output"]["outputPath"],
                 consoleLogLevel=opts["Output"]["consoleLogLevel"])
    logging.debug("Configuration:" + str(opts))
    return opts
def unique(inp, field):
    # Hadoop-streaming mapper (Python 2): emit "<field value>\t1" for every
    # literal-encoded record containing `field`; flag unparsable lines.
    for line in inp:
        line = line.strip()
        if line:
            try:
                record = le(line)  # each line is a literal-encoded dict
                if field in record:
                    print '{0}\t{1}'.format(record[field], 1)
            except SyntaxError:
                # literal_eval raises SyntaxError on malformed lines.
                print 'IgnoredRecord'
def execute(self):
    """Interactive dispatch: prompt for a path and run the configured
    read or write client against it.

    Raises ValueError when mode selection (``__set_mode``) fails.
    """
    if not self.__set_mode():
        raise ValueError('Invalid input')
    path = input('Enter path> ')
    # NOTE(review): __mode appears to hold the client *class* itself, hence
    # the equality comparisons against the classes — confirm with __set_mode.
    if self.__mode == ReadClient:
        print(ReadClient.read(ReadClient(), path))
    elif self.__mode == WriteClient:
        driver = input('Enter driver> ')
        # The entered data string is parsed as a Python literal.
        data = le(input('Enter data> '))
        print(WriteClient.write(WriteClient(writer=driver), path, data))
def fill():
    """Load contacts from the e-mails spreadsheet into the database,
    one INSERT per row."""
    data = pd.read_excel(config.emails_xls).fillna("")
    for _, row in data.iterrows():
        # HR / LL cells hold literal-encoded structures; empty cells stay ''.
        hr = le(row['HR']) if len(row['HR']) else ''
        ll = le(row['LL']) if len(row['LL']) else ''
        attributes = {
            'first_name': row['first_name'],
            'last_name': row['last_name'],
            'HR': Json(hr),
            'LL': Json(ll),
            'email': row['email'],
            'area_code': row['area_code'],
            'phone': row['phone'],
            'optin': bool(row['optin']),
        }
        # NOTE: one query per row is slow; psycopg2's executemany would be
        # the better tool for bulk table filling.
        dbc.query(config.FILL, attributes)
def translation_function():
    """Prompt for English text, POST it to the translation service at URL,
    and return the parsed response."""
    translation_text = input('Insert english text to translate on russian ')
    data = {'text': translation_text}
    resp = requests.post(URL, params=params, data=data)
    result = resp.text
    # NOTE(review): if the service responds with JSON, json.loads would be
    # the safer parser — literal_eval rejects true/false/null.  Confirm the
    # actual response format before changing.
    result_dict = le(result)
    print(result_dict)
    return result_dict
def parse_sweep(argument_list, argument_range):
    """Resolve a parameter sweep: an explicit value list wins; otherwise
    build one with np.arange from (start, end, step) given as strings."""
    if argument_list is not None:
        return argument_list
    # literal_eval turns each string into an int or float with the correct
    # type, so the resulting arange keeps the intended dtype.
    from ast import literal_eval as le
    start, end, step = (le(token) for token in argument_range)
    return np.arange(start, end, step)
def getData(pidList):
    # Scrape product data for each Newegg product id via the OWS JSON API and
    # return it as a DataFrame (Python 2 — print statements below).
    apiurl = 'http://www.ows.newegg.com/Products.egg'
    OutData = []
    for pid in pidList:
        sleep(1)  # throttle requests
        try:
            r = requests.get('%s/%s' % (apiurl, pid)).content
            js = loads(r)
            g = {}
            g['Title'] = js['Title']
            final_price = js['FinalPrice'].replace(',', '')
            if final_price.count('Checkout') == 1:
                # Price hidden until checkout — no usable number.
                g['FinalPrice'] = float('NaN')
            elif final_price == 'See price in cart':
                g['FinalPrice'] = float(js['MappingFinalPrice'].replace(',', '').replace('$', ''))
            else:
                g['FinalPrice'] = float(final_price.replace('$', ''))
            g['OriginalPrice'] = float(js['OriginalPrice'].replace(',', '').replace('$', ''))
            g['Instock'] = BoolToInt(js['Instock'])
            g['Rating'] = js['ReviewSummary']['Rating']
            try:
                # TotalReviews is literal-encoded; default to 0 when absent
                # or unparsable.
                g['TotalReviews'] = le(js['ReviewSummary']['TotalReviews'])[0]
            except:
                g['TotalReviews'] = 0
            g['IsHot'] = BoolToInt(js['IsHot'])
            ShippingPrice = js['ShippingInfo']['NormalShippingText'].split(' ')[0]
            if ShippingPrice.count('Free') == 1:
                g['ShippingPrice'] = 0.0
            elif ShippingPrice.count('Special') == 1:
                g['ShippingPrice'] = 2.99  # "Special shipping => $2.99 Egg Saver Shipping"
            else:
                g['ShippingPrice'] = float(ShippingPrice.replace('$', ''))
            g['IsShipByNewegg'] = BoolToInt(js['IsShipByNewegg'])
            if len(js['PromotionText']) > 0:
                g['Promotion'] = js['PromotionText']
            else:
                g['Promotion'] = 'NaN'
            MIR = js['MailInRebateInfo']
            if MIR is None:
                g['MailInRebateInfo'] = 'NaN'
            else:
                g['MailInRebateInfo'] = js['MailInRebateInfo'][0]
            g['PID'] = pid
            g['Brand'] = js['CoremetricsInfo']['Brand']
            g['Date'] = dtn  # snapshot timestamp from module scope
            OutData.append(g)
        except:
            # Best-effort scrape: log and skip products that fail to parse.
            print 'FAILED: %s' % pid
            pass
    dframe = DataFrame(OutData)
    dframe['FinalPriceShipped'] = dframe['FinalPrice'] + dframe['ShippingPrice']
    return dframe
def group_by_year_zip(inp):
    # Mapper (Python 2): emit "<random shard 1-4>|<zip>\t<quantity>" for each
    # record carrying the required fields; the random shard spreads hot zip
    # keys over several reducers.
    random.seed()
    for line in inp:
        line = line.strip()
        if line:
            try:
                record = le(line)  # each line is a literal-encoded dict
                if 'dispenseQuarter' in record and 'threeDigitSubsZip' in record and 'untsDispensedQuantity' in record and record['threeDigitSubsZip'].strip():
                    print '{0}|{1}\t{2}'.format(random.randrange(1, 5), record['threeDigitSubsZip'].strip(), record['untsDispensedQuantity'].strip())
            except SyntaxError:
                # literal_eval raises SyntaxError on malformed lines.
                print 'IgnoredRecord'
def get(self, id):
    """Tornado handler: render the fbexample page for stored user ``id``,
    or an error message when the id is unknown."""
    fbengine = sqlalchemy.create_engine(fbengine_url)
    fbsession = scoped_session(sessionmaker(bind=fbengine))
    try:
        thisuser = fbsession.query(fb_user).filter(fb_user.id == id).one()
    except NoResultFound:
        self.write("Page not found - Please check the id given")
    else:
        # frienddata is stored as the str() of a dict; parse it back.
        resultdata = le(thisuser.frienddata)
        self.render("templates/fbexample.html",
                    facebook_app_id=facebook_app_id,
                    reactionsresult=resultdata["reactionsresult"],
                    commentsresult=resultdata["commentsresult"],
                    thisuser=thisuser.getDetails(),
                    type="Their",
                    friendlookup=[],
                    lookup={})
    # Always release the scoped session.
    fbsession.remove()
async def heal_branch(ans: Message):
    """Branch handler for the healing menu: apply a pill or injector to the
    pet (health capped at 50), or return to the main menu."""
    button = None
    if ans.payload is not None:
        button = le(ans.payload)
        print(button["button"])
        # Extract the button name from the payload so we don't have to
        # compare raw payload strings like:
        #   if ans.payload == "{\"button\":\"назад\"}":
        button = button["button"]
    u = await GameValues.get(user_id=ans.peer_id)
    info = await Users.get(user_id=ans.peer_id)
    if button == "таблетка":
        if info.health < 50:
            # Consume one pill: +10 health (capped), +100 stat balance.
            u.tablet = u.tablet - 1
            info.health = info.health + 10
            info.statBal = info.statBal + 100
            if info.health > 50:
                info.health = 50
            await u.save()
            await info.save()
            await bot.branch.exit(ans.peer_id)
            await heal(ans)
        else:
            await ans("Ваш питомец уже здоров")
            await bot.branch.exit(ans.peer_id)
            await menu(ans)
    elif button == "шприц":
        if info.health < 50:
            # Consume one injector: +40 health (capped), +100 stat balance.
            u.injector = u.injector - 1
            info.health = info.health + 40
            info.statBal = info.statBal + 100
            if info.health > 50:
                info.health = 50
            await u.save()
            await info.save()
            await bot.branch.exit(ans.peer_id)
            await heal(ans)
        else:
            await ans("Ваш питомец уже здоров")
            await bot.branch.exit(ans.peer_id)
            await menu(ans)
    elif button == "меню":
        await menu(ans)
        await bot.branch.exit(ans.peer_id)
    else:
        await ans("Команда не найдена, попробуйте еще раз или напишите \"Меню\"")
def profile(id):
    """Flask view: render a teacher's profile with their free time slots and
    the human-readable names of their goals."""
    teacher = db.session.query(Teacher).get_or_404(int(id))
    daystime = {}
    goals = {}
    # `free` is stored as the string repr of {day: {time: flag}}.
    freetime = le(teacher.free)
    for day, slots in freetime.items():
        for slot, flag in slots.items():
            # Keep only slots explicitly marked True (last one per day wins).
            if flag == True:
                daystime[day] = slot
    for goal_key, goal_name in contents['goals'].items():
        if goal_key in teacher.goals:
            goals[goal_key] = goal_name
    return render_template('profile.html', teacher=teacher,
                           daystime=daystime, goals=goals)
def vertCoin():
    """Histogram time-over-threshold (TOT) for vertically coincident pulses.

    Reads the corrected data file, parses each channel's literal-encoded
    pulse list, fills the global ``leftPulses`` (channels 1 & 3) and
    ``rightPulses`` (channels 2 & 4) lists with summed edge times, then plots
    both TOT histograms with shared binning.
    """
    #Enter corrected data file
    dataFile = r'\2018-08-01_11-00-27_P_0_CORRECTED.txt'
    #Reads data file into matrix of strings (even though they are CLEARLY lists)
    pulses = pd.read_csv(dataLoc + dataFile, sep=';')
    pulsesArray = pulses.as_matrix()
    #Converts those strings into iterable lists
    for l in pulsesArray:
        for i in range(1, 5):
            l[i] = le(l[i])
    # Vertical coincidence: both channels on a side fired.  Sum rising and
    # falling edge times for every pulse; left = channels 1 & 3, right =
    # channels 2 & 4.
    for line in pulsesArray:
        if not line[1] == [] and not line[3] == []:
            for i in range(0, len(line[1])):
                leftPulses.append(sum(line[1][i]))
            for j in range(0, len(line[3])):
                # BUG FIX: the original indexed with `i` here, re-reading the
                # last pulse of channel 1 instead of iterating channel 3.
                leftPulses.append(sum(line[3][j]))
        if not line[2] == [] and not line[4] == []:
            for i in range(0, len(line[2])):
                rightPulses.append(sum(line[2][i]))
            for j in range(0, len(line[4])):
                # BUG FIX: same `i` -> `j` correction for channel 4.
                rightPulses.append(sum(line[4][j]))
    #Automates binning for the data sets
    if max(leftPulses) > max(rightPulses):
        maxVal = max(leftPulses)
    else:
        maxVal = max(rightPulses)
    if min(leftPulses) < min(rightPulses):
        minVal = min(leftPulses)
    else:
        minVal = min(rightPulses)
    bins = np.arange(np.floor(minVal), np.ceil(maxVal), (maxVal - minVal) / 250)
    #Creates TOT histogram for the two data sets
    plt.figure()
    plt.hist([leftPulses, rightPulses], bins=bins)
    # plt.yscale('log')
    plt.legend(['Left-Hand Channels', 'Right-Hand Channels'])
    plt.xlabel('Time(ns)')
    plt.ylabel('Number of Events')
    plt.title('Time Over Threshold')
def get(section, config, force_reload=False):
    """
    Collect the specified configuration in the provided section.

    The raw INI value is literal-evaluated, so Lists [], Dicts {}, Strings,
    Floats and Integers come back as the correct Python type.

    :param section: the Section to collect the values
    :param config: the configuration to collect
    :param force_reload: variable used by the reload decorator to force the
        INI File reload.
    :return: The configuration as the correct type
    :raises: configparser.NoSectionError, when the provided section is not loaded
             configparser.NoOptionError, when the provided config is not loaded
             configparser.InterpolationMissingOptionError, when the OS ENV doesnt exist
             ValueError: malformed node or string, when the OS ENV are not surrounded by ''
    """
    reader = ConfReader.instance
    raw_value = reader.config.get(section, config)
    return le(raw_value)
def support_data():
    """Scrape the support page and return a newline-joined "name: value"
    summary of the support invoices."""
    with requests.Session() as s:
        response = s.post(url=link, data=supportpayload)
        soup = BeautifulSoup(response.text, 'html.parser')
        # Build a dict *literal* as text from the scraped "name(value)"
        # anchor texts, then parse it back with literal_eval below.
        column_2 = '{'
        for entry in soup.select("tr:nth-child(2) > td > a:nth-child(1)"):
            stroka = '"' + str(entry.get_text(strip=True)).replace('(', '": "')
            column_2 += stroka.replace(')', '", ')
        for entry in soup.select("ul > a"):
            if entry.get_text(strip=True) == "VIP":
                # "VIP" has no parenthesised value — store "None" explicitly.
                stroka = '"' + str(entry.get_text(strip=True))
                column_2 += stroka + '" : "None", '
            else:
                stroka = '"' + str(entry.get_text(strip=True)).replace(
                    '(', '" : "')
                column_2 += stroka.replace(')', '", ')
        column_2 += '}'
        support_invoices = le(column_2)
        support_mails = ''.join(
            [f'{key}: {value}\n' for key, value in support_invoices.items()])
        return support_mails
def work(self, item):
    # Queue worker (Python 2): parse the literal-encoded pose dict received
    # on the 'test' channel and publish every joint angle to its mapped
    # ROS publisher.
    print("%s: %s" % (item['channel'], item['data']))
    if item['channel'] != 'test':
        return
    data = le(str(item['data']))
    print "Data: %s" % data
    # (Earlier revisions handled Head/Neck/RShoulder individually here; the
    # generic per-joint loop below replaced that commented-out code.)
    if type(data) == dict:
        for joint in data:
            if joint not in self.mapping.keys():
                # Unknown joint — abort the whole message.
                return
            pose = data[joint]
            for axis in ['z', 'x', 'y']:
                pub = self.mapping[joint][axis]
                theta = pose[axis]
                print "Publisher: %s, theta: %s" % (pub, theta)
                if pub:
                    pub.publish(Float64(theta))
async def hospital_branch(ans: Message):
    """Branch handler for the hospital shop: buy a pill or an injector for
    100 coins, or return to the main menu."""
    button = None
    if ans.payload is not None:
        button = le(ans.payload)
        print(button["button"])
        # Extract the button name from the payload so we don't have to
        # compare raw payload strings like:
        #   if ans.payload == "{\"button\":\"назад\"}":
        button = button["button"]
    u = await GameValues.get(user_id=ans.peer_id)
    info = await Users.get(user_id=ans.peer_id)
    if button == "таблетка":
        if info.balance >= 100:
            # Deduct the price and add the pill to the inventory.
            u.tablet = u.tablet + 1
            info.balance = info.balance - 100
            await info.save()
            await u.save()
            await ans("Вы успешно купили таблетку!")
            await bot.branch.exit(ans.peer_id)
            await hospital(ans)
        else:
            await ans("У вас не хватает средств")
    elif button == "шприц":
        if info.balance >= 100:
            # Deduct the price and add the injector to the inventory.
            u.injector = u.injector + 1
            info.balance = info.balance - 100
            await info.save()
            await u.save()
            await ans("Вы успешно купили шприц!")
            await bot.branch.exit(ans.peer_id)
            await hospital(ans)
        else:
            await ans("У вас не хватает средств")
    elif button == "меню":
        await menu(ans)
        await bot.branch.exit(ans.peer_id)
    else:
        await ans("Команда не найдена, попробуйте еще раз или напишите \"Меню\"")
def get(self):
    """Tornado handler: update one reaction/comment entry in the stored
    frienddata for user ``selfid`` and persist it.  Responds with a small
    JSON status string."""
    change = self.get_argument("change")
    id = self.get_argument("id")
    name = self.get_argument("name")
    selfid = self.get_argument("selfid")
    fbengine = sqlalchemy.create_engine(fbengine_url)
    fbsession = scoped_session(sessionmaker(bind=fbengine))
    try:
        thisdbuser = fbsession.query(fb_user).filter(fb_user.id == selfid).one()
    except NoResultFound:
        print ("user not found")
        self.write('{"status": "error"}')
    else:
        try:
            # frienddata is stored as the str() of a dict; parse it back.
            resultdata = le(thisdbuser.frienddata)
            reactionsresult = resultdata["reactionsresult"]
            commentsresult = resultdata["commentsresult"]
        except (ValueError, SyntaxError) as e:
            print ("data could not be parse")
            self.write('{"status": "error"}')
        else:
            if "reaction" in change:
                # e.g. change == "reaction3" -> update reactionsresult[3].
                index = int(change.replace("reaction", ""))
                reactionsresult[index]["name"] = name
                reactionsresult[index]["id"] = id
                thisdbuser.frienddata = str({"reactionsresult": reactionsresult,
                                             "commentsresult": commentsresult})
                fbsession.commit()
                self.write('{"status": "success"}')
            elif "comment" in change:
                # e.g. change == "comment2" -> update commentsresult[2].
                index = int(change.replace("comment", ""))
                commentsresult[index]["name"] = name
                commentsresult[index]["id"] = id
                thisdbuser.frienddata = str({"reactionsresult": reactionsresult,
                                             "commentsresult": commentsresult})
                fbsession.commit()
                self.write('{"status": "success"}')
            else:
                print ("something wrong with input")
                self.write('{"status": "error"}')
exam_ticket[n]["text"] += '\n'+lines[1:] elif lines[0] == '#': if not "left" in exam_ticket[n]["question"]: exam_ticket[n]["question"]["left"] = [] exam_ticket[n]["question"]["left"].append({"id":'A',"text": lines[1:]}) elif lines[0] == '*': if not "choices" in exam_ticket[n]["question"]: exam_ticket[n]["question"]["choices"] = [] id_a = 1 exam_ticket[n]["question"]["choices"].append({"id": str(id_a),"text": lines[1:]}) id_a+=1 elif lines[0] == '?': exam_ticket[n]["question"]["type"] = lines[1:] elif lines[0] == '-': exam_ticket[n]["solution"] = {} exam_ticket[n]["solution"]["correct"] = le(lines[1:]) elif lines[0] == '|': exam_ticket[n]["solution"] = {} exam_ticket[n]["solution"]["correct_variants"] = [] lines = lines[1:].split('|') for line in lines: if line[0] == '[' or line[0] == '{': exam_ticket[n]["solution"]["correct_variants"].append(le(line)) else: exam_ticket[n]["solution"]["correct_variants"].append(line) else: if not "choices" in exam_ticket[n]["question"]: exam_ticket[n]["question"]["choices"] = [] id_a = 1 exam_ticket[n]["question"]["choices"].append({"id": str(id_a),"text": lines}) id_a+=1
p.terminate( ) #Added because some workers were hanging, should probably be resolved more elegantly p.join() if __name__ == "__main__": try: try: config_path = sys.argv[1] except Exception: config_path = "./config.conf" cp = configparser.ConfigParser() cp.read(config_path) all_cams = le(cp["cameras"]["all_cams"]) height_group = le(cp["cameras"]["height_group"]) stitch_pair = le(cp["cameras"]["stitch_pair"]) camIDs = le(cp["cameras"]["camIDs"]) cid_flat = camIDs + [stitch_pair[camID] for camID in camIDs] days = le(cp["forecast"]["days"]) inpath = le(cp["paths"]["inpath"]) # tmpfs='/dev/shm/' tmpfs = le(cp["paths"]["tmpfs"]) stitch_path = le(cp["paths"]["stitch_path"]) static_mask_path = le(cp["paths"]["static_mask_path"]) try: cam_tz = pytz.timezone(cp["cameras"]["cam_timezone"])
def cart(request):
    """Django view: render the cart page for the product id passed as
    "?=<upid>" in the query string, reading the precomputed cart-abandonment
    rate from ``tempFile``."""
    uri = request.build_absolute_uri()
    # The product id is encoded after "?=" in the URL.
    upid = int(uri[uri.find("?=") + 2:])
    # Hard-coded catalogue entries shown on the cart page.
    posts = [
        {'upid': 2, 'model': 'Dell Inspiron 15 5590 v2', 'device': 'laptop', 'price': 'Rs. 49,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 3, 'model': 'Dell Inpiron 15 5590 v3', 'device': 'laptop', 'price': 'Rs. 52,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 4, 'model': 'Dell Inspiron 15 5590 v1', 'device': 'laptop', 'price': 'Rs. 39,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 5, 'model': 'Dell Inspiron 15 7591', 'device': 'laptop', 'price': 'Rs. 63,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 6, 'model': 'Dell XPS 13', 'device': 'laptop', 'price': 'Rs. 67,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 7, 'model': 'Dell XPS 13 v2', 'device': 'laptop', 'price': 'Rs. 77,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 8, 'model': 'Dell XPS 15 v1', 'device': 'laptop', 'price': 'Rs. 75,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 9, 'model': 'Dell XPS 15 v2', 'device': 'laptop', 'price': 'Rs. 1,24,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 10, 'model': 'Dell Alienware 15 v1', 'device': 'laptop', 'price': 'Rs. 1,28,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 11, 'model': 'Dell Alienware 15 v2', 'device': 'laptop', 'price': 'Rs. 1,30,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 12, 'model': 'Dell Alienware 17 v1', 'device': 'laptop', 'price': 'Rs. 1,22,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 13, 'model': 'Dell Alienware 17 v2', 'device': 'laptop', 'price': 'Rs. 1,24,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 14, 'model': 'Dell Latitude 14 v2', 'device': 'laptop', 'price': 'Rs. 45,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 15, 'model': 'Dell Latitude 14 v3', 'device': 'laptop', 'price': 'Rs. 50,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 16, 'model': 'Dell Latitude 14 v1', 'device': 'laptop', 'price': 'Rs. 44,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
        {'upid': 17, 'model': 'Dell Inspiron 17', 'device': 'laptop', 'price': 'Rs. 55,000.00', 'imagePath': 'products/assets/img/inspiron5390.jpg'},
    ]
    from ast import literal_eval as le
    # tempFile holds the precomputed abandonment rate as a Python literal.
    with open(tempFile, 'r') as Fobj:
        data_raw = Fobj.read()
    data = le(data_raw)
    abandonRate = data
    print(abandonRate)
    # NOTE(review): `tweets_before` computed here is unused — the context
    # below always passes the literal 1.  Confirm intended behaviour.
    if (abandonRate < 0.30):
        tweets_before = 1
    context = {"posts": posts, "upid": upid, "tweets_before": 1}
    return render(request, 'products/cart.html', context)
<li>Communications using, or data stored on, this IS are not private, are subject to routine monitoring, interception, and search, and may be disclosed or used for any USG-authorized purpose.</li> </ul><p>This IS includes security measures (e.g., authentication and access controls) to protect USG interests -- not for your personal benefit or privacy. Notwithstanding the above, using this IS does not constitute consent to PM, LE, or CI investigative searching or monitoring of the content of privileged communications, or work product, related to personal representation or services by attorneys, psychotherapists, or clergy, and their assistants. Such communications and work product are private and confidential. See User Agreement for details.</p>''') # registration EMAIL_HOST = os.getenv('EMAIL_HOST', None) EMAIL_PORT = le(os.getenv('EMAIL_PORT', '25')) EMAIL_USE_TLS = str2bool(os.getenv('EMAIL_USE_TLS', 'False')) EMAIL_BACKEND = os.getenv( 'EMAIL_BACKEND', 'django.core.mail.backends.smtp.EmailBackend') EMAIL_HOST_USER = os.getenv('EMAIL_HOST_USER', None) EMAIL_HOST_PASSWORD = os.getenv('EMAIL_HOST_PASSWORD', None) DEFAULT_FROM_EMAIL = os.getenv('DEFAULT_FROM_EMAIL', None) THEME_ACCOUNT_CONTACT_EMAIL = os.getenv('THEME_ACCOUNT_CONTACT_EMAIL', None) ACCOUNT_ACTIVATION_DAYS = le(os.getenv('ACCOUNT_ACTIVATION_DAYS', '7')) # path setup LOCAL_ROOT = os.path.abspath(os.path.dirname(__file__)) APP_ROOT = os.path.join(LOCAL_ROOT, os.pardir) # static files storage STATICFILES_DIRS = [
def group_by_year_zip_ndc(inp):
    # Mapper (Python 2): emit "quarter|zip|ndc\tquantity" for records that
    # carry all four required fields.  Unlike the sibling mappers there is no
    # SyntaxError guard here — a malformed line will raise.
    for line in inp:
        record = le(line.strip())
        if 'dispenseQuarter' in record and 'threeDigitSubsZip' in record and 'ndc' in record and 'untsDispensedQuantity' in record:
            print '{0}|{1}|{2}\t{3}'.format(record['dispenseQuarter'].strip(), record['threeDigitSubsZip'].strip(), record['ndc'].strip(), record['untsDispensedQuantity'].strip())
def keys(self):
    """Return the mapping's keys with each string key decoded back into the
    Python object it represents."""
    decoded = []
    for raw_key in dict(self):
        decoded.append(le(raw_key))
    return decoded
t_local = local_tz.localize(t, is_dst=None) t_utc = t_local.astimezone(pytz.utc) return t_utc.timestamp() if __name__ == "__main__": try: try: config_path = sys.argv[1] except Exception: config_path = "./config.conf" cp = configparser.ConfigParser() cp.read(config_path) inpath=le(cp["paths"]["inpath"]) outpath=le(cp["paths"]["feature_path"]) tmpfs=le(cp["paths"]["tmpfs"]) stitch_path=le(cp["paths"]["stitch_path"]) GHI_Coor = le(cp["GHI_sensors"]["GHI_Coor"]) GHI_loc=[GHI_Coor[key] for key in sorted(GHI_Coor)] GHI_loc=np.array(GHI_loc) #[:1]; lead_minutes=le(cp["forecast"]["lead_minutes"]) lead_steps=[lt/INTERVAL for lt in lead_minutes] days=le(cp["forecast"]["days"]) print("lead_minutes: %s\n lead_steps: %s\n" % (lead_minutes, lead_steps)) try:
def __iter__(self):
    """Yield each key decoded from its string representation back into the
    Python object it encodes."""
    yield from map(le, dict.__iter__(self))
'django.contrib.auth.backends.ModelBackend', 'guardian.backends.ObjectPermissionBackend', ) AUTH_LDAP_BIND_DN = os.environ.get('AUTH_LDAP_BIND_DN', '') AUTH_LDAP_BIND_PASSWORD = os.environ.get('AUTH_LDAP_BIND_PASSWORD', '') AUTH_LDAP_USER_ATTR_MAP = { 'first_name': 'givenName', 'last_name': 'sn', 'email': LDAP_EMAIL_MAP, } AUTH_LDAP_USER_SEARCH = LDAPSearch(LDAP_SEARCH_DN, ldap.SCOPE_SUBTREE, AUTH_LDAP_USER) # ldap django search mappings GROUP_SEARCH = os.environ.get('LDAP_GROUP_SEARCH', None) if GROUP_SEARCH and len(GROUP_SEARCH) > 0: try: GROUP_SEARCH = le(GROUP_SEARCH) except SyntaxError: GROUP_SEARCH = GROUP_SEARCH if GROUP_SEARCH: AUTH_LDAP_USER_FLAGS_BY_GROUP = {} AUTH_LDAP_GROUP_SEARCH = LDAPSearch( GROUP_SEARCH, ldap.SCOPE_SUBTREE, "(objectClass=group)" ) ACTIVE_SEARCH = os.environ.get('LDAP_ACTIVE_SEARCH', None) STAFF_SEARCH = os.environ.get('LDAP_STAFF_SEARCH', None) SU_SEARCH = os.environ.get('LDAP_SUPERUSER_SEARCH', None) if ACTIVE_SEARCH and len(ACTIVE_SEARCH) > 0:
import sys, os, csv, argparse
sys.path.append(os.path.join(os.path.dirname(os.getcwd())))
from lib import utils
from ast import literal_eval as le
from collections import OrderedDict

# Convert a row-major results CSV into a column-major, tab-separated table.
parser = argparse.ArgumentParser(description='Convert row major csv to column based')
parser.add_argument('--i', type=str, required=True)
parser.add_argument('--o', type=str, required=True)
cli = parser.parse_args()

src_file = './' + cli.i + '.csv'
dst_file = './' + cli.o + '.csv'

# Keys look like "<a>_<b>" and map to literal-encoded result structures.
raw_stat = OrderedDict()
with open(src_file, 'r') as fh:
    for key, value in OrderedDict(csv.reader(fh)).items():
        raw_stat[tuple(map(int, key.split('_')))] = le(value)

cols = utils.row2col(raw_stat)

with open(dst_file, 'w') as fh:
    writer = csv.writer(fh, delimiter='\t')
    writer.writerow(['SNR','loss3/top-5','loss3/top-1','loss2/top-1',
                     'loss1/top-1','loss2/top-5','loss1/top-5'])
    # One output row per SNR: the SNR followed by that SNR's entry from
    # every metric column.
    for key, value in cols.items():
        writer.writerow([int(key)] + [series[int(key)] for series in value.values()])