# NOTE(review): whitespace-mangled (function collapsed onto one line). The
# placement of the two identical print() calls relative to the for-loop is
# ambiguous from this view, so the text is kept verbatim rather than
# reformatted — restore the original indentation from version control.
# Purpose (as the text shows): smoke-test hashing(20).rehashProbing() with
# 19 generated key.key(10, x) objects, dumping the backing table.
def test(): testa = hashing(20) for x in range(1, 20): newKey = key.key(10, x) testa.rehashProbing(newKey, False) print(testa.hashtable.table) print(testa.hashtable.table)
def __init__(self, meta, p):
    """Build the per-parameter test configuration from the shared *meta*.

    Derives the redshift binning, a "realistic" underlying P(z), and a
    flat P(z) for the dimensionality assigned to parameter index *p*.
    (Python 2 module: uses xrange.)
    """
    self.p = p
    # File-system location for this parameter's outputs.
    template = path("{topdir}/{p}")
    self.path_builder = template.fill(topdir=meta.topdir, p=self.p)
    self.key = key.key(p=p)
    # Dimensionality is per-parameter and differs from meta's global view.
    self.ndims = meta.params[self.p]
    self.allzs = meta.allzs[:self.ndims + 1]
    self.zlos, self.zhis = self.allzs[:-1], self.allzs[1:]
    self.zmids = (self.zlos + self.zhis) / 2.
    self.zavg = sum(self.zmids) / self.ndims
    # Realistic underlying P(z), renormalized over the first ndims bins.
    self.realsum = sum(meta.realistic[:self.ndims])
    self.realistic_pdf = np.array(
        [meta.realistic[k] / self.realsum / meta.zdifs[k]
         for k in xrange(0, self.ndims)])
    self.truePz = self.realistic_pdf
    # Clamp at machine epsilon so the log never sees zero.
    self.logtruePz = np.array(
        [m.log(max(tPz, sys.float_info.epsilon)) for tPz in self.truePz])
    # Flat P(z) over the same bins, plus its log form.
    self.avgprob = 1. / self.ndims / meta.zdif
    self.logavgprob = m.log(self.avgprob)
    self.flatPz = [self.avgprob] * self.ndims
    self.logflatPz = [self.logavgprob] * self.ndims
    print('initialized ' + str(self.ndims) + ' parameter test')
def __init__(self, sheet):
    """Authorize against the Google Sheets API and cache all records.

    The service-account key is read via the local key module (presumably
    writing 'key.json' to disk — confirm), used to build credentials, and
    removed again once the client is authorized.
    """
    keyfile = key.key()
    keyfile.read()
    scopes = ['https://spreadsheets.google.com/feeds']
    creds = SAC.from_json_keyfile_name('key.json', scopes)
    client = gspread.authorize(creds)
    keyfile.remove()
    self.sheet = client.open(sheet).sheet1
    self.all = self.sheet.get_all_records()
def poll(self, *args):
    """Register a key binding with the worker queue.

    An optional callback may be passed as the first positional argument.
    Without one, the call blocks until the queue worker has drained the
    queue, then returns the key object.
    """
    hotkey = key.key()
    hotkey.engine = 'xlib'
    callback = args[0] if args else None
    self.queue.put((hotkey, callback))
    if not callback:
        # Synchronous mode: wait for the worker to process our request.
        self.queue.join()
    return hotkey
def poll(self, *args):
    """Queue a key-binding request on the xlib engine.

    When called without a callback argument, block until the worker
    thread has processed the queued request.
    """
    binding = key.key()
    binding.engine = "xlib"
    cb = None
    if args:
        cb = args[0]
    self.queue.put((binding, cb))
    if not cb:
        self.queue.join()
    return binding
# NOTE(review): whitespace-mangled GAE request handler kept verbatim — the
# inline '#' comments make a mechanical one-line reparse impossible; recover
# the original line structure from version control before editing.
# Review findings to fix once reformatted:
#  - `event_shortname == None` should be `is None` (PEP 8).
#  - `event` stays None when no cached event matches the shortname, so the
#    later `event.key()` would raise AttributeError — needs a guard/404.
#  - the list-comprehension variable `key` shadows any imported `key`
#    module — rename it (e.g. `k`) when reformatting.
def get(self): logging.debug("PublicMapAjaxHandler") event_shortname = self.request.get("shortname") page = self.request.get("page") page_int = int(page) if event_shortname == None: event_shortname = "sandy" event = None events = event_db.GetAllCached() for e in events: if e.short_name == event_shortname: event = e #logging.debug(event.name) #q = Query(model_class = site_db.Site)#, projection=('latitude', 'longitude','id', 'status', 'claimed_by', 'work_type', 'derechos_work_type', 'case_number', 'floors_affected')) ids = [] #filter by event #status = "open" #q.filter("event =", event.key()) #q.is_keys_only() #if status == "open": #logging.debug("status == open") #q.filter("status >= ", "Open") #elif status == "closed": #q.filter("status < ", "Open") #logging.debug("status == closed") #logging.debug("status = " + status) #query = q.fetch(PAGE_OFFSET, offset = page_int * PAGE_OFFSET) #for q in query: #ids.append(q.key().id()) #q = db.Query(site_db.Site, projection=('latitude', 'longitude','id', 'status', 'claimed_by', 'work_type', 'derechos_work_type', 'case_number', 'floors_affected'), filter('status >=', "open")) #q = site_db.Site.gql("SELECT latitude, longitude, id, claimed_by, work_type, derechos_work_type, case_number, floors_affected WHERE status >= open") where_string = "Open" gql_string = 'SELECT latitude, longitude, claimed_by, work_type, case_number, floors_affected FROM Site WHERE status >= :1 and event = :2'# WHERE status >= %s", where_string q = db.GqlQuery(gql_string, where_string, event.key()) this_offset = page_int * PAGE_OFFSET logging.debug("this_offset = " + str(this_offset)) ids = [key.key().id() for key in q.fetch(PAGE_OFFSET, offset = this_offset)] logging.debug("ids len = " + str(len(ids))) output = json.dumps( [s[1] for s in site_db.GetAllCached(event, ids)], default=dthandler) self.response.out.write(output) return
def poll(self, *args): print "poll0" bindkey = key.key() bindkey.engine = 'user32' callback = None if args: callback = args[0] #Queue our request self.queue.put((bindkey, callback)) self.mode = 'poll' if not callback: self.queue.join() return bindkey print "poll8: done"
def send_message(PhoneNo, task):
    """Send *task* as a bulk SMS to every number in *PhoneNo* via Fast2SMS.

    PhoneNo: iterable of phone numbers (ints or strings).
    task:    message text to deliver.
    Prints the parsed JSON response on success, or a failure notice when
    the request or JSON decoding fails.
    """
    # join() accepts any iterable; the original list() wrapper was redundant.
    numbers = ','.join(map(str, PhoneNo))
    url = "https://www.fast2sms.com/dev/bulk"
    params = {
        "authorization": key.key(),  # API key supplied by the local key module
        "sender_id": "FSTSMS",
        "message": task,
        "language": "english",
        "route": "p",
        "numbers": numbers,
    }
    headers = {'cache-control': "no-cache"}
    try:
        response = requests.request("GET", url, headers=headers, params=params)
        # Renamed from `dict`, which shadowed the builtin.
        data = response.json()
        print(data)
    except (requests.exceptions.RequestException, ValueError):
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit). ValueError covers a non-JSON body.
        print("Not send. please try again")
# NOTE(review): whitespace-mangled driver kept verbatim — the nested dict
# comprehensions plus multiprocessing/thread orchestration make a mechanical
# reformat unsafe from this view; recover indentation from version control.
# Purpose, as far as the text shows: build the p/s/n/i run hierarchies from
# meta, publish them via the module-global `init_runs`, run initial plots,
# then for each survey fork plotter threads and pool.map(fsamp, keys).
# Python 2 constructs present (xrange, `print (...)`).
def main(): meta = setup() p_runs = {key(p=p):perparam(meta, p) for p in lrange(meta.params)} s_runs = {p.add(s=s):persurv(meta, p_runs[p], s) for s in lrange(meta.survs) for p in p_runs.keys()} n_runs = {s.add(n=n):persamp(meta, s_runs[s], n) for n in xrange(meta.samps) for s in s_runs.keys()} i_runs = {n.add(i=i):perinit(meta, n_runs[n], i) for i in lrange(meta.inits) for n in n_runs.keys()} runs = {'p_runs': p_runs, 's_runs': s_runs, 'n_runs': n_runs, 'i_runs': i_runs} global init_runs init_runs = runs # make initial plots distribute.run_offthread_sync(iplots.initial_plots, meta, runs) nps = mp.cpu_count()#-1 for s_run in s_runs.values(): print ('starting run of: ' + str(s_run.key)) # fork off all of the plotter threads, dist = distribute.distribute(plots.all_plotters, start = True, meta = meta, p_run = s_run.p_run, s_run = s_run, n_runs = s_run.n_runs, i_runs = [i_run for n_run in s_run.n_runs for i_run in n_run.i_runs]) # inject the distribution handler into the metadata object print ('plotter threads started') s_run.dist = dist print ('setting dist on {} to {}'.format(s_run, s_run.dist)) pool = mp.Pool(nps) # may add back plot-only functionality later keys = [s_run.key.add(n=n, i=i) for i in lrange(meta.inits) for n in xrange(meta.samps)] print ('generating {} keys'.format(len(keys))) pool.map(fsamp, keys) dist.finish() print('ending run of: ' + str(s_run.key))
# NOTE(review): whitespace-mangled plotting driver kept verbatim. It walks
# the p/s/n/i run hierarchy, threading per-survey plot state tuples through
# setup/update/wrapup stages and timing the whole pass into meta.plottime.
# The statement nesting (three loop levels plus wrapup calls) cannot be
# recovered mechanically from one line — restore from version control.
# Python 2 (xrange). Also note: `with open(...)` already closes the file,
# so the trailing plottimer.close() is redundant once reformatted.
def initial_plots(meta, runs): plot_priorgen(meta) print("runs = " + str(runs)) for p in lrange(meta.params): pkey = key(p=p) p_runs = runs["p_runs"] print("p_runs = " + str(p_runs)) p_run = p_runs[pkey] for s in lrange(meta.survs): skey = pkey.add(s=s) s_run = runs["s_runs"][skey] survinfo = (meta, p_run, s_run) plot_true_tup = plot_true_setup(*survinfo) plot_truevmap_tup = plot_truevmap_setup(*survinfo) plot_pdfs_tup = plot_pdfs_setup(*survinfo) for n in xrange(meta.samps): nkey = skey.add(n=n) n_run = runs["n_runs"][nkey] sampinfo = survinfo + (n_run,) plot_true_tup = plot_true(plot_true_tup, *sampinfo) plot_truevmap_tup = plot_truevmap(plot_truevmap_tup, *sampinfo) ivals = plot_ivals_setup(*sampinfo) plot_priorsamps(*sampinfo) plot_pdfs_tup = plot_pdfs(plot_pdfs_tup, *sampinfo) for i in lrange(meta.inits): ikey = nkey.add(i=i) i_run = runs["i_runs"][ikey] initinfo = sampinfo + (i_run,) ivals = plot_ivals(ivals, *initinfo) plot_ivals_wrapup(ivals, *sampinfo) plot_true_wrapup(plot_true_tup, *survinfo) plot_truevmap_wrapup(plot_truevmap_tup, *survinfo) plot_pdfs_wrapup(plot_pdfs_tup, *survinfo) print("initial plots completed") with open(meta.plottime, "w") as plottimer: plottimer.write(str(timeit.default_timer()) + " iplots \n") plottimer.close()
# NOTE(review): whitespace-mangled Flask app kept verbatim; the `add` view
# is truncated at the end of this chunk, so no reformat is attempted here.
# Findings to address once reformatted:
#  - SECURITY: "select * from task where id ='"+str(ids)+"'" builds SQL by
#    string concatenation from a computed value — use a parameterized query
#    (cur.execute("... where id = %s", (ids,))) throughout.
#  - a module-level psycopg2 connection/cursor is shared by all requests;
#    confirm this is single-threaded or move to per-request connections.
from flask import Flask, render_template, request, session, redirect, url_for, Markup import psycopg2 import key app = Flask(__name__) app.secret_key = key.key() connection = psycopg2.connect("host=localhost dbname=todo user=postgres password=postgres") cur = connection.cursor() @app.template_filter('cr') def cr(arg): return Markup(arg.replace('\r', '<br>')) @app.route('/') def log_in(): return render_template('index.html') @app.route('/add',methods=['POST', 'GET']) def add(): if request.method == 'POST': username = session.get('name') cur.execute("SELECT COUNT(id) FROM task") rows = cur.fetchall() for row in rows: ids = row[0] + 1 while True: cur.execute("select * from task where id ='"+str(ids)+"'") check = cur.fetchall() if not check: break ids = ids + 1
# NOTE(review): whitespace-mangled Flask bootstrap kept verbatim; the
# getId() helper is truncated at the end of this chunk, so no reformat is
# attempted. The secret key is loaded from the local key module rather than
# hard-coded, which is good practice.
#export FLASK_APP=application #export FLASK_ENV=development import os from cs50 import SQL from flask import Flask, render_template, request, redirect, session, url_for, flash from werkzeug.utils import secure_filename from werkzeug.security import generate_password_hash, check_password_hash from key import key UPLOAD_FOLDER = "static/profilepics/uploads" ALLOWED_EXTENSIONS = {'pdf', 'png', 'jpg', 'jpeg', 'gif'} app = Flask(__name__) app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER app.config["SECRET_KEY"] = key() db = SQL("sqlite:///yarn.db") # session object contains "user" (the username) and "user_id" (the id) # global variables # non-route functions def getId(user): currentuser = user profile = db.execute("SELECT * FROM users WHERE name = :name", name=currentuser)
# NOTE(review): whitespace-mangled GAE site-lookup handler (one chunk spread
# across two source lines) kept verbatim — embedded '#' comments prevent a
# mechanical reparse; restore the original indentation from version control.
# Findings for the eventual reformat:
#  - the bare `except` around float() should be `except ValueError`, and the
#    handler keeps executing after self.response.set_status(404) — add a
#    `return`.
#  - the comprehension variable `key` shadows any imported `key` module.
#  - `id = int(id_param)` shadows the builtin `id`.
def AuthenticatedGet(self, org, event): id_param = self.request.get("id") latitude_param = self.request.get("latitude") longitude_param = self.request.get("longitude") if latitude_param and longitude_param: try: latitude = float(latitude_param) longitude = float(longitude_param) except: self.response.set_status(404) json_array = [] for site in site_db.Site.gql( "Where latitude = :1 and longitude = :2 and event = :3", latitude, longitude, event.key() ): json_string = json.dumps({"id": site.key().id(), "address": site.address}) json_array.append(json_string) self.response.out.write(json.dumps(json_array, default=dthandler)) return if id_param == "all": status = self.request.get("status", default_value="") page = self.request.get("page", default_value="0") page_int = int(page) logging.debug("page = " + page) # query_string = "SELECT * FROM Site WHERE event = :event_key LIMIT %s OFFSET %s" % (PAGE_OFFSET, page_int * PAGE_OFFSET) ##logging.debug("OFFSET = " + PAGE_OFFSET) ##logging.debug("page * OFFSET = " + page_int * PAGE_OFFSET) # query = db.GqlQuery(query_string, event_key = event.key()) q = Query(model_class=site_db.Site) ids = [] # filter by event q.filter("event =", event.key()) q.is_keys_only() if status == "open": logging.debug("status == open") q.filter("status >= ", "Open") elif status == "closed": q.filter("status < ", "Open") logging.debug("status == closed") logging.debug("status = " + status) # query = q.fetch(PAGE_OFFSET, offset = page_int * PAGE_OFFSET) # for q in query: # ids.append(q.key().id()) this_offset = page_int * PAGE_OFFSET logging.debug("this_offset = " + str(this_offset)) ids = [key.key().id() for key in q.fetch(PAGE_OFFSET, offset=this_offset)] logging.debug("ids len = " + str(len(ids))) output = json.dumps([s[1] for s in site_db.GetAllCached(event, ids)], default=dthandler) self.response.out.write(output) return # if id_param == "all": # county = self.request.get("county", default_value = "all") # status = self.request.get("status", 
default_value = "") # q = Query(model_class = site_db.Site, keys_only = True) ##filter by event # q.filter("event =", event.key()) # if status == "open": # q.filter("status >= ", "Open") # elif status == "closed": # q.filter("status < ", "Open") # if county != "all": # q.filter("county =", county) # ids = [key.id() for key in q.run(batch_size = 2000)] # output = json.dumps( # [s[1] for s in site_db.GetAllCached(event, ids)], # default=dthandler) # self.response.out.write(output) # return try: id = int(id_param) except: self.response.set_status(404) return site = site_db.GetAndCache(id) if not site: self.response.set_status(404) return # TODO(jeremy): Add the various fixes for Flash # and other vulnerabilities caused by having user-generated # content in JSON strings, by setting this as an attachment # and prepending the proper garbage strings. # Javascript security is really a pain. self.response.out.write(json.dumps(site_db.SiteToDict(site), default=dthandler))
# NOTE(review): whitespace-mangled Raspberry Pi GPIO recorder script kept
# verbatim; the main `while not (button_shutdown.pressed()):` body is
# truncated at the end of this chunk, so no reformat is attempted here.
# Python 2 (`print '...'`). myexit() cleans up GPIO state and powers the
# device down; the try-block configures storage paths, GPIO pins, the USB
# detection thread, and (commented out) ALSA capture volume.
def myexit(): print 'Programm wird beendet' GPIO.cleanup() shutdown() try: # Define storage path_tmp = '/ram/' path = '/mnt/usb/' prefix = 'Titel' extension = '.mp3' # Configure GPIOs GPIO.setmode(GPIO.BOARD) button_shutdown = key.key(7) button_record = key.key(11) GPIO.setup(12, GPIO.OUT) GPIO.setup(15, GPIO.OUT) GPIO.output(12, GPIO.HIGH) # Start thread to detect external memory usb = usbstick.usbstick(path, 13) # Configure volume #m = alsaaudio.Mixer('Mic', 0, 1) #m.setvolume(100, 0, 'capture') # Only run until shutdown button gets pressed while not (button_shutdown.pressed()):
def key_generate():
    """Generate a key pair, show it in the UI labels and a message box.

    key.key() is expected to return a (private, public) pair of strings;
    both are written into the module-level Tk labels and echoed in an
    info dialog.
    """
    pair = key.key()
    label_pri_Key['text'] = pair[0]
    label_pub_Key['text'] = pair[1]
    summary = "秘密鍵:" + pair[0] + "\n" + "公開鍵:" + pair[1]
    mbox.showinfo('秘密鍵と公開鍵', summary)
# NOTE(review): whitespace-mangled weatherUrl class (one chunk spread over
# several source lines, Korean comments) kept verbatim — the long method
# bodies and inline comments make a safe mechanical reformat impossible
# from this view; restore indentation from version control first.
# Observations for the eventual cleanup:
#  - `weaterkey = key()` is a class attribute shared by all instances
#    (and looks like a typo for `weatherkey`).
#  - in parsedUrl(), the first `except: print(parsed_data)` runs before
#    `parsed_data` is ever assigned if json.loads fails — that would raise
#    NameError; the bare excepts should be narrowed and log the raw input.
#  - setData() stores key 'RO6' (letter O) for the 'R06' (zero) rain
#    category — looks like a typo; confirm against consumers before fixing.
class weatherUrl: ''' 기상청 API는 URL형태로 요청을 하면 기상청에서 요청 상황과 맞게 기상 예보를 보내준다. 따라서 URL형태로 만들때의 지정된 형태가 있어 그 형태를 맞춰줘야한다 기상청 API의 카테고리의 맞게 파싱을 해줘야하기 때문에 사용하는 클래스이다. ''' weaterkey = key() def __init__(self): self.cp = cp() self.today = weatherTime() #오늘 날짜를 보기위해 객체선언 self.serviceKey = self.weaterkey.key #사용자 키 self.base_date = self.today.date() #요청시의 날짜 self.base_time = self.timeCheck( time_table=self.get_standard_time()) #요청시의 시간 self.nx = '63' #전북대학교 기준좌표 x:63 y:89 self.ny = '89' self.numOfRows = '10' #요청시 열의 수 self.pageNo = '1' self._type = 'json' #요청시의 데이터의 형태 #동네예보 카테고리 self.forestspace_category = { 'POP': ['강수확률', '(%)'], 'PTY': ['강수', "형태"], 'R06': ['6시간 강수량', '(mm)'], 'REH': ['습도', '(%)'], 'S06': ['6시간 신적설', '(cm)'], 'SKY': ['하늘', "상태"], 'T3H': ['3시간 기온', '°C'], 'TMN': ['아침 최저 기온', '°C'], 'TMX': ['낮 최고 기온', '°C'], 'UUU': ['풍속[동서성분]', '(m/s)'], 'VVV': ['풍속[남북성분]', '(m/s)'], 'WAV': ['파고', '(M)'], 'VEC': ['풍향', '(m/s)'], 'WSD': ['풍속', '(m/s)'] } #초단기실황 예보 카테고리 self.forestData_category = { 'T1H': ['기온', '°C'], 'RN1': ['1시간 강수량', 'mm'], 'UUU': ['동서바람성분', 'm/s'], 'VVV': ['남북바람성분', 'm/s'], 'REH': ['습도', '%'], 'PTY': ['강수형태', '코드값'], 'VEC': ['풍향', '0'], 'WSD': ['풍속', '1'] } #초단기예보 self.forestGrib_category = { 'T1H': ['기온', '°C'], 'RN1': ['1시간 강수량', 'mm'], 'SKY': ['하늘상태', '코드값'], 'UUU': ['동서바람성분', 'm/s'], 'VVV': ['남북바람성분', 'm/s'], 'REH': ['습도', '%'], 'PTY': ['강수형태', '코드값'], 'LGT': ['낙뢰', '코드값'], 'VEC': ['풍향', '0'], 'WSD': ['풍속', '1'] } def get_standard_time(self): #기상청 API에서는 2,5,8,11,14,17,20,23의 baseTime만을 요청해야 #사용이 가능하다. 따라서 그 사이의 있는 시간은 저 시간안으로 바꿔저야한다. 
standard_time = [2, 5, 8, 11, 14, 17, 20, 23] return standard_time #url생성시 사용되는 함수 def setParameter(self, base_date=None, base_time=None, nx=None, ny=None, numOfRows=None, pageNo=None, _type=None): if base_date is not None: self.base_date = str(base_date) if base_time is not None: self.base_time = str(base_time) if nx is not None: self.nx = str(nx) if ny is not None: self.ny = str(ny) if numOfRows is not None: self.numOfRows = str(numOfRows) if pageNo is not None: self.pageNo = str(pageNo) if _type is not None: self._type = str(_type) @property def get_subUrl(self): #기상청에서 url로 데이터를 요청할때 필요한 url app_servicekey = 'serviceKey={}'.format(self.serviceKey) app_base_date = '&base_date={}'.format(self.base_date) app_base_time = '&base_time={}'.format(self.base_time) app_nx = '&nx={}'.format(self.nx) app_ny = '&ny={}'.format(self.ny) app_numOfRows = '&numOfRows={}'.format(self.numOfRows) app_pageNo = '&pageNo={}'.format(self.pageNo) app_type = '&_type={}'.format(self._type) return app_servicekey + app_base_date + app_base_time + app_nx + app_ny + app_numOfRows + app_pageNo + app_type #동네예보 조회 #지역한정으로 데이터를 준다. @property def ForestSpaceCheck(self): url = 'http://newsky2.kma.go.kr/service/SecndSrtpdFrcstInfoService2/ForecastSpaceData' com_url = "{}?{}".format(url, self.get_subUrl) return com_url #초단기예보조회 @property def ForestTimeData(self): url = ' http://newsky2.kma.go.kr/service/SecndSrtpdFrcstInfoService2/ForecastTimeData' com_url = "{}?{}".format(url, self.get_subUrl) return com_url #초단기실황조회 @property def ForesecastGrib(self): url = 'http://newsky2.kma.go.kr/service/SecndSrtpdFrcstInfoService2/ForecastGrib' com_url = "{}?{}".format(url, self.get_subUrl) return com_url #Json타입 형태에 url을 열때 사용하는 함수 def parsedUrl(self, url=None, url_type='json', type=None): #url을 파싱을 하는곳이다. Url데이터 형태에 따라 파싱에 사용되는 모듈이 #차이가 있기 때문에 요청되는 데이터의 따라서 처리가된다. 
#Json타입:json을 입력 #:xml을 입력 if url == None and type == None: print('url을 확인하세요') return if type == None: pass if url == None: if type == 'check': url = self.ForestSpaceCheck if type == 'grib': url = self.ForesecastGrib if type == 'data': url = self.ForestTimeData #Json if (url_type == 'json'): #넘어오는 url의 encoding형태는 utf8 이다. #따라서 url의 decoding을 utf8로 안해주시에 #문자들이 이상하게 파뀐다. urlopen = rs.urlopen(url).read().decode('utf8') #필요한 부분만 추출하는 과정이다. try: parsed_data = json.loads(urlopen)['response'] except: print(parsed_data) try: parsed_data = parsed_data['body'] except: print(parsed_data) try: parsed_data = parsed_data['items'] except: print(parsed_data) try: parsed_data = parsed_data['item'] except: print(parsed_data) return parsed_data def forestPredict(self, parsed_json=None, category=None): #요청된 3가지 유형중 하나일때 그것에 맞는 파싱을 해주는 함수이다. if (parsed_json == None or category == None): return c = parsed_json weather_category = category #test라는 리스트안에 날씨의 관련된 데이터를 파싱하여 집어넣는다. test = [] for z in range(len(c)): for i in weather_category: #특정값들은 수치값이 카테고리 값이기때문에 INT로 형변환시 문제가 발생하여 #특정 category의 값들은 손수 처리를 해줬다. if c[z]['category'] == i: if c[z]['category'] == 'SKY': test.append(self.cp.getSkycondition(c[z]['fcstValue'])) if c[z]['category'] == 'UUU': test.append( self.cp.getWindSpeed(c[z]['fcstValue'], type='UUU')) if c[z]['category'] == 'VVV': test.append( self.cp.getWindSpeed(c[z]['fcstValue'], type='VVV')) if c[z]['category'] == 'WAV': test.append(self.cp.getWeather16(c[z]['fcstValue'])) else: if len(weather_category.get(i)) < 2: test.append( weather_category.get(i)[0] + '은 ' + str(c[z]['fcstValue']) + ' 입니다') else: test.append( weather_category.get(i)[0] + '은 ' + str(c[z]['fcstValue']) + weather_category.get(i)[1] + ' 입니다') return test #요청한 시간을 기상청 API요청시 사용되는 시간에 맞게 해주는 함수이다. def timeCheck(self, time_table=None): a = datetime.datetime.now() for i in range(len(time_table)): if (i + 1 < len(time_table)): #기상청 API에서 데이터를 제공하는 시간은 baseTime 0200일때는 2시 10분이 넘어야 그때 정보가 갱신된다. 
#만약 데이터가 없는데 요청을 하는 경우에는 기상청 API에서 오류값들이 넘어온다. if a > datetime.datetime(a.year, a.month, a.day, time_table[i], 10) and a < datetime.datetime( a.year, a.month, a.day, time_table[i + 1], 10): #baseTime같은 경우 2시는 0200식의 문자열클래스로 반환이 되어야 한다. #따라서 zfill 문자열의 내장함수를 사용해서 처리를 한다. return str(time_table[i]).zfill(2) + '00' else: #기상청 API에서 가장 시간이 많이들어간 구문이다. #기상청 API에서 다음날 새벽 0100시에 요청을 해야할때는 baseDate가 그전날이여야한다. #현재 시간으로 요청할시에는 그 다음날에 다음날이 요청이 되기 때문에 기상청 API에서는 아직 예보가 안된 #baseDate이기 때문에 오류가 발생한다. 따라서 이 것을 처리하기 위해서 다음 조건문을 만들었따. #23시 10분에서 자정까지는 위에 문제가 발생을 안한다. if a > datetime.datetime( a.year, a.month, a.day, 23, 10) and a < datetime.datetime( a.year, a.month, a.day, 23, 10) + timedelta( hours=1, minutes=30): return '2300' #만약 새벽 0시에서 2시10분정에 시간에 요청을 할때는 그 전날 baseDate로 조정을 해야한다. if (a >= datetime.datetime(a.year, a.month, a.day, 0, 0) and a < datetime.datetime(a.year, a.month, a.day, 2, 10)): #base_date를 전날로 바꿔주는 객체를 선언한다. self.base_date = self.today.beforetime() return '2300' def getSiganlCategry(self, i): if i in self.forestspace_category: a = self.forestspace_category.get(i) return a def predictTest(self, a): #사용자가 쉽게 정보를 파악할수 있게 파싱을 해주는 함수 data = [] for i in a: c = '기준시간은 {}:{}이고 예측시간은{}:{}에 {}은 {}{}입니다.'.format( i['baseDate'], i['baseTime'], i['fcstDate'], i['fcstTime'], self.getSiganlCategry(i['category'])[0], i['fcstValue'], self.getSiganlCategry(i['category'])[1]) data.append(c) return data @property def getTime(self): #현재 요청한 기상청 API의 요청한 base날짜와 시간을 얻기 위한 함수 return "{} {}".format(self.base_date, self.base_time) def setData(self, parsed_data=None): #기상청 api에서 받은 데이터를 습도값과 비교하기 위해 저장을 한다. c = {} for n, i in enumerate(parsed_data): if parsed_data[n]['category'] == 'POP': #강수확률 c['POP'] = parsed_data[n]['fcstValue'] elif parsed_data[n]['category'] == 'R06': #6시간 강수량 c['RO6'] = parsed_data[n]['fcstValue'] elif parsed_data[n]['category'] == 'REH': #습도 c['REH'] = parsed_data[n]['fcstValue'] return c
# NOTE(review): whitespace-mangled dynovh module header kept verbatim
# (Python 2: urllib2, xmlrpclib). Note that `key = key.key()` rebinds the
# imported module name `key` to an instance object — later code can no
# longer reach the module; rename the instance (e.g. `_key`) when
# reformatting. get_zone_id() queries the OVH API for the A-record id of
# zone[1].zone[0] and raises when the result is malformed or empty.
# You should have received a copy of the GNU General Public License # along with dynovh. If not, see <http://www.gnu.org/licenses/> import OvhApi import key import urllib2, time, re, sys, xmlrpclib __author__ = 'hubert' #REFIP = 'checkip.dyndns.com' REFIP = 'checkip.pointfixe.fr' NBR_CONNEXION = 10 NBR_ATTEMPT_REFIP = 10 key = key.key() AK = key.appkey AS = key.appsec CK = key.conkey api = OvhApi.Api("https://eu.api.ovh.com/1.0", AK, AS, CK) def get_zone_id(zone): res = api.get('/domain/zone/' + zone[0] + '/record?fieldType=A&subDomain=' + zone[1]) if not isinstance(res, list): raise Exception(str(res)) if len(res) == 0: raise Exception("Le nom de domaine " + zone[0] + " ne contient pas de sous-domaine " + zone[1])
# NOTE(review): whitespace-mangled Flask/Facebook OAuth bootstrap kept
# verbatim (the trailing @app.route decorator's view function lies outside
# this chunk). Findings for the eventual reformat:
#  - app.secret_key is a hard-coded placeholder string — load it from the
#    key module like the Facebook credentials.
#  - key.key() is called twice; unpack one call into consumer_key and
#    consumer_secret instead.
#  - login_required assumes session["name"] exists; session.get("name")
#    would avoid a KeyError for anonymous users — confirm against callers.
import json, urllib, urllib2 import key from bson.objectid import ObjectId app = Flask(__name__) app.secret_key = "don't store this on github" app.debug = True oauth = OAuth() facebook = oauth.remote_app('facebook', base_url='https://graph.facebook.com/', request_token_url=None, access_token_url='/oauth/access_token', authorize_url='https://www.facebook.com/dialog/oauth', consumer_key=key.key()[0], consumer_secret=key.key()[1], request_token_params={'scope': 'user_friends, email'} ) def login_required(f): @wraps(f) def inner(*args, **kwargs): if session["name"]==None: flash("You must login to access this protected page!") session['nextpage'] = request.url return redirect(url_for('login')) return f(*args, **kwargs) return inner @app.route('/', methods=["POST","GET"])
def __getattribute__(self, k):
    """Lazily materialize missing attributes on the *section* class.

    A normal lookup is attempted first; on AttributeError the name is
    installed on the class as ``key(k)`` and the lookup is retried, so
    every attribute springs into existence on first access.
    """
    try:
        # Fast path: the attribute already exists.
        return super(section, self).__getattribute__(k)
    except AttributeError:
        # First access: cache the generated key object on the class itself
        # so subsequent lookups (from any instance) hit the fast path.
        setattr(section, k, key(k))
        return super(section, self).__getattribute__(k)
# NOTE(review): whitespace-mangled VizWiz/Azure-vision script header kept
# verbatim. `import key` appears twice — drop the duplicate when
# reformatting. The Azure subscription key is loaded from the local key
# module rather than hard-coded, which is good.
from pprint import pprint import numpy as np import requests import key from skimage.transform import resize from skimage import io from skimage import color from skimage import feature import matplotlib.pyplot as plt from sklearn.neural_network import MLPClassifier from sklearn.preprocessing import StandardScaler from sklearn.naive_bayes import GaussianNB from sklearn.svm import SVC from sklearn import preprocessing import key subscription_key = key.key() vision_base_url = 'https://westus.api.cognitive.microsoft.com/vision/v1.0' vision_analyze_url = vision_base_url + '/analyze?' print("import data from VizWiz...") base_url = 'https://ivc.ischool.utexas.edu/VizWiz/data' img_dir = '%s/Images/' % base_url # Retrieve file from ULR and store it locally split = 'train' train_file = '%s/Annotations/%s.json' % (base_url, split) train_data = requests.get(train_file, allow_redirects=True) #print(train_file) print("import training data successfully") # Read the local file
# NOTE(review): whitespace-mangled HTTP-server bootstrap kept verbatim; the
# MyHandler class is truncated at the end of this chunk, so no reformat is
# attempted. The live connection credentials come from key.key(), but the
# commented-out block still contains what looks like a real RDS hostname
# and password — scrub it (and the repo history) if this code is public.
import pandas as pd import pymysql import time from http.server import BaseHTTPRequestHandler, HTTPServer from os import listdir from os.path import isfile, join import key instance = key.key() connection = pymysql.connect(host=instance.host, port=instance.port, user=instance.user, passwd=instance.passwd, charset='utf8', autocommit=True) ### Read rds at first # connection = pymysql.connect(host='codymonster-maria.cfnceagzudnn.ap-northeast-2.rds.amazonaws.com', port=3306, # user='******', passwd='306crewcodymonster', # charset='utf8', autocommit=True) # predictions.to_csv('C:/Users/MINSU/181228_test.csv',encoding='utf8') ############################################### HOST_NAME = '0.0.0.0' PORT_NUMBER = 9000 content = '' class MyHandler(BaseHTTPRequestHandler): def do_HEAD(self): self.send_response(200)
# NOTE(review): whitespace-mangled GAE handler (one chunk continuing onto
# the next source line) kept verbatim — this is a near-duplicate of another
# AuthenticatedGet in this collection; consider extracting the shared
# query/pagination logic once both are reformatted. Same issues as its
# twin: bare `except` around float(), execution continues after
# set_status(404), the comprehension variable `key` shadows the `key`
# module, and `id = int(id_param)` shadows the builtin.
def AuthenticatedGet(self, org, event): id_param = self.request.get('id') latitude_param = self.request.get("latitude") longitude_param = self.request.get("longitude") if latitude_param and longitude_param: try: latitude = float(latitude_param) longitude = float(longitude_param) except: self.response.set_status(404) json_array = [] for site in site_db.Site.gql( 'Where latitude = :1 and longitude = :2 and event = :3', latitude, longitude, event.key()): json_string = json.dumps({ "id": site.key().id(), "address": site.address, }) json_array.append(json_string) self.response.out.write( json.dumps(json_array, default = dthandler)) return if id_param == "all": status = self.request.get("status", default_value = "") page = self.request.get("page", default_value = "0") page_int = int(page) logging.debug("page = " + page) #query_string = "SELECT * FROM Site WHERE event = :event_key LIMIT %s OFFSET %s" % (PAGE_OFFSET, page_int * PAGE_OFFSET) ##logging.debug("OFFSET = " + PAGE_OFFSET) ##logging.debug("page * OFFSET = " + page_int * PAGE_OFFSET) #query = db.GqlQuery(query_string, event_key = event.key()) q = Query(model_class = site_db.Site) ids = [] #filter by event q.filter("event =", event.key()) q.is_keys_only() if status == "open": logging.debug("status == open") q.filter("status >= ", "Open") elif status == "closed": q.filter("status < ", "Open") logging.debug("status == closed") logging.debug("status = " + status) #query = q.fetch(PAGE_OFFSET, offset = page_int * PAGE_OFFSET) #for q in query: #ids.append(q.key().id()) this_offset = page_int * PAGE_OFFSET logging.debug("this_offset = " + str(this_offset)) ids = [key.key().id() for key in q.fetch(PAGE_OFFSET, offset = this_offset)] logging.debug("ids len = " + str(len(ids))) output = json.dumps( [s[1] for s in site_db.GetAllCached(event, ids)], default=dthandler) logging.info("after output") self.response.out.write(output) return #if id_param == "all": #county = self.request.get("county", default_value = "all") #status = 
self.request.get("status", default_value = "") #q = Query(model_class = site_db.Site, keys_only = True) ##filter by event #q.filter("event =", event.key()) #if status == "open": #q.filter("status >= ", "Open") #elif status == "closed": #q.filter("status < ", "Open") #if county != "all": #q.filter("county =", county) #ids = [key.id() for key in q.run(batch_size = 2000)] #output = json.dumps( #[s[1] for s in site_db.GetAllCached(event, ids)], #default=dthandler) #self.response.out.write(output) #return try: id = int(id_param) except: self.response.set_status(404) return site = site_db.GetAndCache(id) if not site: self.response.set_status(404) return # TODO(jeremy): Add the various fixes for Flash # and other vulnerabilities caused by having user-generated # content in JSON strings, by setting this as an attachment # and prepending the proper garbage strings. # Javascript security is really a pain. self.response.out.write( json.dumps(site_db.SiteToDict(site), default = dthandler))
# NOTE(review): whitespace-mangled game loop (one chunk over three source
# lines, Python 2 prints) kept verbatim — the deeply nested loop/branch
# structure cannot be reformatted safely from this view; restore the
# original indentation from version control.
# One likely bug to confirm once reformatted: near the end,
# `Donkey[i].donk_hits_player==-1` compares the *bound method object* to -1
# (always False) — it should be a call, `Donkey[i].donk_hits_player(surface)
# == -1`, as is done earlier in the same loop.
def main(): # from constants import * surface=[[' ' for x in range(width)]for y in range(height)] screen=set_screen(surface,width,height) #setting the basic screen surface_copy=copy.deepcopy(surface) surface[queen_x][queen_y]='Q' # creating instances Fire=fireball(surface,surface_copy,width,height) prince=player(surface,29,2) donk1=donkey(surface,9,2,-1) Donkey=[donk1] Coins=coins(width,height) Key=key(width,height,key_x,key_y) Coins.generate_coins() Coins.place_coins(surface) level=1 lives=3 score=0 queen_captured=False key_captured=False running=True print '\n\n' print_surface(surface,score,lives,level) while running: surface=copy.deepcopy(surface_copy) x='s' if queen_captured==False: x=getchar() # time.sleep(0.04) if x=='q': print 'You have decided to exit the game.' break else: queen_captured=False surface=Coins.place_coins(surface) if ord(x)==32: player_pos=prince.get_pos() surface[player_pos[0]-1][player_pos[1]]='P' surface[queen_x][queen_y]='Q' for i in range(len(Donkey)): surface=Donkey[i].donk_move(Fire,surface,Fire.flag%3) surface=Fire.update_balls_location(surface) surface=Coins.place_coins(surface) print_surface(surface,score,lives,level) time.sleep(0.3) surface=copy.deepcopy(surface_copy) surface[player_pos[0]][player_pos[1]]='P' surface[queen_x][queen_y]='Q' for i in range(len(Donkey)): surface=Donkey[i].donk_move(Fire,surface,Fire.flag%3) surface=Fire.update_balls_location(surface) surface=Coins.place_coins(surface) if Fire.fireball_hits_player(surface)==-1 : surface[player_pos[0]][player_pos[1]]='+' lives-=1 score-=25 running=False print_surface(surface,score,lives,level) for i in range(len(Donkey)): if Donkey[i].donk_hits_player(surface)==-1: surface[player_pos[0]][player_pos[1]]='+' lives-=1 score-=25 running=False print_surface(surface,score,lives,level) if running==False: if lives>0: print 'You lost a life.' 
time.sleep(1.5) surface=copy.deepcopy(surface_copy) surface=prince.reset_player_pos(surface) surface[queen_x][queen_y]='Q' for i in range(len(Donkey)): surface=Donkey[i].donk_move(Fire,surface,Fire.flag%3) surface=Coins.place_coins(surface) Fire.remove_fireballs() print_surface(surface,score,lives,level) running=True else: print 'You lost a life.' print "Game Over!! " else: print_surface(surface,score,lives,level) time.sleep(0.3) continue surface=prince.move(surface,x) # surface=make_surface_move(surface) surface[queen_x][queen_y]='Q' for i in range(len(Donkey)): surface=Donkey[i].donk_move(Fire,surface,Fire.flag%3) surface=Fire.update_balls_location(surface) surface=Coins.place_coins(surface) if prince.queen_captured(queen_x,queen_y): surface[queen_x][queen_y]='*' print_surface(surface,score,lives,level) queen_captured=True time.sleep(1.5) if queen_captured==True: print 'You have successfully completed level : ' +str(level) if level<3: level+=1 if level==3: donk2=donkey(surface,8,7,1) Donkey.append(donk2) print 'proceeding to level : ' + str(level) surface_copy=Key.change_level(surface_copy,level) surface=prince.reset_player_pos(surface) Coins.generate_coins() Fire.remove_fireballs() continue else : print 'You have won the game.' break if Key.player_gets_key(surface)==True: surface_copy=Key.restore_surface_copy(surface_copy) surface=Key.restore_surface_copy(surface) surface[key_x][key_y]='P' time.sleep(1) print 'You got the key.' print 'Now the queen is free.' 
print_surface(surface,score,lives,level) continue player_pos=prince.get_pos() if Fire.fireball_hits_player(surface)==-1 or Fire.check_for_swap(surface,prince.cur_dir)==-1: surface[player_pos[0]][player_pos[1]]='+' lives-=1 score-=25 running=False for i in range(len(Donkey)): if running==True: if Donkey[i].donk_hits_player==-1: surface[player_pos[0]][player_pos[1]]='+' lives-=1 score-=25 running=False if Coins.player_gets_a_coin(player_pos[0],player_pos[1])==1: score+=5 print_surface(surface,score,lives,level) if running==False: if lives>0: print 'You lost a life.' time.sleep(1.5) surface=copy.deepcopy(surface_copy) surface=prince.reset_player_pos(surface) surface[queen_x][queen_y]='Q' for i in range(len(Donkey)): surface=Donkey[i].donk_move(Fire,surface,Fire.flag%3) surface=Coins.place_coins(surface) Fire.remove_fireballs() print_surface(surface,score,lives,level) running=True else: print 'You lost a life.' print "Game Over!! "
def key_event(self, type, keycode):
    """Forward a raw key event to the key module.

    The parameter name ``type`` shadows the builtin but is kept verbatim
    to preserve the public keyword interface for existing callers.
    """
    key.key(type, keycode)
# NOTE(review): whitespace-mangled module header kept verbatim; the trailing
# weatherUrl class is truncated (cut mid-docstring), so no reformat is
# attempted here. Findings:
#  - `from key import key` is immediately shadowed by the local `class key`
#    defined below — the module-level `z_key = key()` instance is built from
#    the *imported* key first, but any later `key()` call resolves to the
#    local class; rename one of the two when reformatting.
#  - the local class's `input()` method also shadows the builtin `input`.
import xml #xml형태의 파일을 읽고 처리하기 위한 모듈 from time_z import weatherTime #기상청 API시간에 필요한 함수들을 모은 모듈 from key import key #기상청 API에 접근할때 사용자에게 할당된 키 import datetime #날짜와 시간이 관련된 모듈 from datetime import timedelta #시간을 비교하기 위한 모듈 import pytz #시간 관련 모듈 한국기준 시간으로 잡아주느 모듈 from weater16 import categoryParsing as cp #파싱을 위한 카테고리 보관소 z_key = key() #초단기 버전 class key: def __init__(self): self.key = str(z_key.key) def input(self, key): self.key = key return class weatherUrl: ''' 기상청 API는 URL형태로 요청을 하면