def _load_data():
    """Load countries, cities, and timezones from bundled JSON resources.

    Each city/timezone dict is enriched in place with derived lookup fields
    (lowercased keys, display names, pre-split search words) so that later
    searches don't have to recompute them per query.

    Returns:
        tuple: (countries, cities, timezones) dicts keyed by their JSON keys.
    """
    with app.open_resource('data/countries.json') as f:
        countries = json.load(f)['countries']
    with app.open_resource('data/cities.json') as f:
        cities = json.load(f)['cities']
    for city_key in cities:
        city = cities[city_key]
        country_name = countries[city['country']]['name']
        city['key'] = city_key
        # 'ikey' is the case-insensitive key used for lookups.
        city['ikey'] = city_key.lower()
        city['type'] = 'city'
        city['full_display_name'] = city['display_name'] + ', ' + country_name
        city['search_name'] = city['full_display_name'].lower()
        # 'sw_prim' / 'sw_sec': primary (city-only) and secondary
        # (city + country) search-word lists.
        city['sw_prim'] = city['display_name'].lower().split()
        city['sw_sec'] = city['sw_prim'] + country_name.lower().split()
    with app.open_resource('data/timezones.json') as f:
        timezones = json.load(f)['timezones']
    for timezone_key in timezones:
        timezone = timezones[timezone_key]
        timezone['key'] = timezone_key
        timezone['ikey'] = timezone_key.lower()
        timezone['timezone'] = timezone_key
        timezone['type'] = 'timezone'
        # e.g. "Pacific Time (PT)"
        timezone['full_display_name'] = '%s (%s)' % (
            timezone['name'], timezone['short']
        )
        timezone['search_name'] = timezone['full_display_name'].lower()
        # Timezone names like "America/New_York" are split on '/' so each
        # path segment becomes a searchable word.
        timezone['sw'] = (timezone['name'] + ' ' +
                          timezone['short']).replace('/', ' ').lower().split()
    return countries, cities, timezones
def queryCitiesInfo():
    """Build a combined JSON payload of states, devices, and device links.

    Reads three static JSON files, resolves each inner-connection link's
    endpoint coordinates by matching device names, and emits the links as a
    GeoJSON-style FeatureCollection.

    NOTE(review): the three file handles opened here are never closed, and
    the key "feathers" is presumably a typo for the GeoJSON-mandated
    "features" — confirm against consumers before changing either.
    """
    states_json = open(os.path.join(APP_STATIC, 'data', 'states.json'))
    states = json.load(states_json)
    devices_json = open(os.path.join(APP_STATIC, 'data', 'devices.json'))
    devices = json.load(devices_json)
    connection_json = open(os.path.join(APP_STATIC, 'data', 'devicesConnections.json'))
    connection = json.load(connection_json)
    links = []
    links_out = {}
    for link in connection:
        if link["type"] == "innerConnect":
            # Resolve both endpoints' coordinates from the device list.
            for dev in devices:
                if link["source"]["deviceName"] == dev["properties"]["name"]:
                    link['source']["coordinates"] = dev["geometry"]["coordinates"]
                if link["target"]["deviceName"] == dev["properties"]["name"]:
                    link['target']["coordinates"] = dev["geometry"]["coordinates"]
            # Skip self-loops (both endpoints at the same coordinates).
            if link['source']["coordinates"] != link['target']["coordinates"]:
                links.append({"type": "Feature",
                              "properties": {"Capacity": link["source"]["capacity"],
                                             "fromPort": link["source"]["port"],
                                             "toPort": link["target"]["port"]},
                              "geometry": {"type": "LineString",
                                           "coordinates": [link["source"]["coordinates"],
                                                           link["target"]["coordinates"]]}})
    links_out["feathers"] = links
    links_out["type"] = "FeatureCollection"
    return json.dumps({"states": states, "devices": devices, "links": links_out})
def login_submit():
    """Authenticate a login form POST against per-user JSON files on disk.

    Scans every file in <DATA_PATH>/user for a matching username, then loads
    "<user_id>.txt" (JSON content) and compares the stored password.  On
    success, populates the session and redirects to the index.

    NOTE(review): passwords are compared in plaintext — confirm whether the
    stored files are expected to hold hashes.
    """
    user_dir = os.path.join(app.config['DATA_PATH'], "user")
    user_files = [os.path.join(user_dir, f) for f in os.listdir(user_dir)]
    user_files = [f for f in user_files if os.path.isfile(f)]
    for user_file in user_files:
        with open(user_file, 'r') as f:
            data = json.load(f)
        if request.form["username"] == data["username"]:
            user_id = data["id"]
            break
    else:  # no break: no file matched the submitted username
        flash("User name does not exist!", 'error')
        return render_template('login.djhtml')
    information_file = os.path.join(user_dir, "{}.txt".format(user_id))
    try:
        with open(information_file, 'r') as read_file:
            user_data = json.load(read_file)
        if user_data["password"] != request.form["password"]:
            # TODO: Remember username entered???
            flash("User name or password is incorrect!", 'error')
            return render_template('login.djhtml')
    except EnvironmentError:
        # Missing/unreadable per-user info file is treated as a server error.
        abort(500)
    session["logged_in"] = True
    session["username"] = user_data["username"]
    session["user_id"] = user_id
    flash("You have logged in!", 'success')
    return redirect(url_for('index'))
def build_url(component, filename, **values):
    """
    search bower asset and build url

    :param component: bower component (package)
    :type component: str
    :param filename: filename in bower component - can contain directories (like dist/jquery.js)
    :type filename: str
    :param values: additional url parameters
    :type values: dict[str, str]
    :return: url, or None to signal the caller to fall back to url_for
    :rtype: str | None
    """
    root = current_app.config['BOWER_COMPONENTS_ROOT']
    bower_data = None
    package_data = None
    # check if component exists in bower_components directory
    if not os.path.isdir('/'.join([current_app.root_path, root, component])):
        # FallBack to default url_for flask
        return None
    # load bower.json of specified component (used below for cache-busting)
    bower_file_path = '/'.join([current_app.root_path, root, component, 'bower.json'])
    if os.path.exists(bower_file_path):
        with open(bower_file_path, 'r') as bower_file:
            bower_data = json.load(bower_file)
    # check if package.json exists and load package.json data
    package_file_path = '/'.join([current_app.root_path, root, component, 'package.json'])
    if os.path.exists(package_file_path):
        with open(package_file_path, 'r') as package_file:
            package_data = json.load(package_file)
    # check if specified file actually exists
    if not os.path.exists('/'.join([current_app.root_path, root, component, filename])):
        return None
    # check if minified file exists (by pattern <filename>.min.<ext>);
    # substitutes the minified filename if successful
    if current_app.config['BOWER_TRY_MINIFIED']:
        if '.min.' not in filename:
            minified_filename = '%s.min.%s' % tuple(filename.rsplit('.', 1))
            minified_path = '/'.join([root, component, minified_filename])
            if os.path.exists('/'.join([current_app.root_path, minified_path])):
                filename = minified_filename
    # determine version of component and append as ?version= parameter to allow cache busting;
    # falls back to the file's mtime when no version is declared
    if current_app.config['BOWER_QUERYSTRING_REVVING']:
        if bower_data is not None and 'version' in bower_data:
            values['version'] = bower_data['version']
        elif package_data is not None and 'version' in package_data:
            values['version'] = package_data['version']
        else:
            values['version'] = os.path.getmtime('/'.join([current_app.root_path, root, component, filename]))
    return url_for('bower.serve', component=component, filename=filename, **values)
def statistics():
    """Render the statistics page with cinab, cdb, and aspera stats.

    Loads the cdb and aspera stat snapshots from JSON files under
    <basedir>/db and passes them to the stats.html template.
    """
    # Context managers close the JSON files even on error; the original
    # version leaked both file handles.
    with open(os.path.join(basedir, "db", "dut.json"), "r") as cdb_file:
        cdb_stats = json.load(cdb_file)
    with open(os.path.join(basedir, "db", "aspera.json"), "r") as aspera_file:
        aspera_stats = json.load(aspera_file)
    return render_template('stats.html', cinab_stats=get_cinab_stats(),
                           cdb_stats=cdb_stats, aspera_stats=aspera_stats)
def initmoddb():
    """Reset the mod collection and reseed it from the bundled JSON fixtures."""
    moddb.remove({})  # blood for the blood god
    # with-statements replace the open/load/close triplets so the files are
    # closed even if json.load raises; the loop removes the duplication.
    for fixture_path in ("static/json/yolo.json", "static/json/turtle.json"):
        with open(fixture_path) as pjsonf:
            moddb.insert(json.load(pjsonf))
def initsheetdb():
    """Reset the sheet collection, reseed it from JSON fixtures, and register
    the initial "testing" ID."""
    sheetdb.remove({})  # blood for the blood god
    # with-statements replace the open/load/close triplets so the files are
    # closed even if json.load raises; the loop removes the duplication.
    for fixture_path in ("static/json/bob.json", "static/json/thered.json"):
        with open(fixture_path) as bobjsonf:
            sheetdb.insert(json.load(bobjsonf))
    base.addID("testing", 0)
def get_last_test_report(name):
    """Return the parsed JSON test report of the most recent completed build.

    Walks the Jenkins build list (newest first) for job *name* and stops at
    the first build that is no longer running; if every build is still
    running, the oldest one is used.

    Raises whatever urllib2 raises on network failure.
    """
    url_suffix = "/api/json"  # was misspelled 'url_sufix'
    url = get_test_url(name)
    runs = json.load(urllib2.urlopen(url + url_suffix))
    for build in runs['builds']:
        info = json.load(urllib2.urlopen(build['url'] + url_suffix))
        # `not info['building']` replaces the non-idiomatic `== False`.
        if not info['building']:
            break
    return json.load(urllib2.urlopen(info['url'] + "testReport" + url_suffix))
def load_config(resource_json, permission_json):
    """Load resource.json and permission.json.

    :param resource_json: path of resource.json
    :param permission_json: path of permission.json
    :return: tuple of (resource config, permission config)
    :raises IOError: with an annotated message when either file is unreadable
    """
    try:
        with codecs.open(resource_json, encoding="utf-8") as resource_fp:
            resource_cfg = json.load(resource_fp)
        with codecs.open(permission_json, encoding="utf-8") as permission_fp:
            permission_cfg = json.load(permission_fp)
        return resource_cfg, permission_cfg
    except IOError as e:
        # Annotate the error so the caller sees which config load failed.
        e.strerror = "can't load auth config file: %s" % e.strerror
        raise
def rebuild(game, rebuild_all):
    """Recreate the SQLite tables for *game* from its JSON config files.

    Always drops and refills the `words` table; when `rebuild_all` is true,
    also drops and recreates users, phrases, ratings, and notifications and
    reseeds the user list.  (Python 2 code: uses the print statement.)
    """
    # Load words from JSON config files
    with open("games/{}_words.json".format(game)) as words_file:
        words_json = json.load(words_file)
        things = words_json["things"]
        modifiers = words_json["modifiers"]
    with open("games/{}_users.json".format(game)) as users_file:
        users_json = json.load(users_file)
        users = users_json["users"]
    with sqlite3.connect("games/{}.db".format(game)) as db:
        cursor = db.cursor()
        cursor.execute("DROP TABLE IF EXISTS words")
        if rebuild_all:
            cursor.execute("DROP TABLE IF EXISTS users")
            cursor.execute("DROP TABLE IF EXISTS phrases")
            cursor.execute("DROP TABLE IF EXISTS ratings")
            cursor.execute("DROP TABLE IF EXISTS notifications")
        # Must be enabled per-connection for the REFERENCES clauses below.
        cursor.execute("pragma foreign_keys = on")
        cursor.execute("CREATE TABLE words(word, type)")
        if rebuild_all:
            cursor.execute("CREATE TABLE users(name PRIMARY KEY UNIQUE)")
            cursor.execute("CREATE TABLE phrases(id INTEGER PRIMARY KEY AUTOINCREMENT, a,b)")
            # One rating per (phrase, user), enforced by the unq constraint.
            cursor.execute(''' CREATE TABLE ratings(phrase_id INTEGER KEY, user_name, rating INTEGER, FOREIGN KEY(phrase_id) REFERENCES phrases(id), FOREIGN KEY(user_name) REFERENCES users(name), CONSTRAINT unq UNIQUE(phrase_id, user_name)) ''')
            for u in users:
                cursor.execute("INSERT INTO users (name) VALUES(?)", (u,))
            cursor.execute(''' CREATE TABLE notifications(id INTEGER PRIMARY KEY AUTOINCREMENT, user_name, read INTEGER DEFAULT 0, text, FOREIGN KEY(user_name) REFERENCES users(name)) ''')
        print "Inserting {} things and {} modifiers into the database.".format(len(things), len(modifiers))
        for t in things:
            cursor.execute("INSERT INTO words (word, type) VALUES(?,?)", (t, "thing"))
        for a in modifiers:
            cursor.execute("INSERT INTO words (word, type) VALUES(?,?)", (a, "modifier"))
        db.commit()
    return "Database rebuilt!"
def ensure_login():
    """Ensure the request is from a logged-in user with access to the repo.

    Flow: if no GitHub token is in the session, redirect to the GitHub OAuth
    authorize page.  Otherwise, use the cached (auth, timestamp) decision in
    session['access'] unless it has expired, in which case re-check whether
    the configured repository appears in the user's repos or their org's
    repos.  Aborts with 403 when access is denied.

    NOTE(review): the access token is sent as a query parameter, and uses
    urllib2 — Python 2 era GitHub API usage.
    """
    logger = logging.getLogger(__name__ + '.ensure_login')
    try:
        login = session['login']
    except KeyError:
        # Not logged in: drop any cached access decision and start OAuth,
        # passing the current URL (base64) so we can come back afterwards.
        session.pop('access', None)
        back = base64.urlsafe_b64encode(request.url)
        params = {
            'client_id': current_app.config['CLIENT_ID'],
            'redirect_uri': url_for('auth', back=back, _external=True),
            'scope': 'repo'
        }
        return redirect('https://github.com/login/oauth/authorize?' +
                        url_encode(params))
    logger.debug('login = %r', login)
    try:
        auth, ltime = session['access']
    except (KeyError, ValueError):
        # Missing or malformed cache entry: force a re-check below.
        auth = False
        ltime = None
    if ltime is None or ltime < datetime.datetime.utcnow() - EXPIRES:
        repo_name = current_app.config['REPOSITORY']
        # user repos
        response = urllib2.urlopen(
            'https://api.github.com/user/repos?per_page=100&access_token=' +
            login
        )
        repo_dicts = json.load(response)
        response.close()
        repos = frozenset(repo['full_name'] for repo in repo_dicts)
        logger.debug('repos = %r', repos)
        auth = repo_name in repos
        # org repos (fallback when the repo isn't directly the user's)
        if not auth:
            url = 'https://api.github.com/orgs/{0}/repos?per_page=100&access_token={1}'
            try:
                response = urllib2.urlopen(
                    url.format(repo_name.split('/', 1)[0], login)
                )
            except IOError:
                # Org lookup failed (e.g. not an org): treat as no access.
                auth = False
            else:
                repo_dicts = json.load(response)
                response.close()
                org_repos = frozenset(repo['full_name'] for repo in repo_dicts)
                logger.debug('org_repos = %r', org_repos)
                auth = repo_name in org_repos
        # Cache the decision with a timestamp for the EXPIRES window.
        session['access'] = auth, datetime.datetime.utcnow()
    if not auth:
        abort(403)
    logger.debug('auth = %r', auth)
def sgvjson():
    """Serve the most recent glucose entries as JSON.

    Reads the display units from the OpenAPS profile, picks whichever of the
    xdrip/monitor glucose files was written most recently, tags the newest
    entry with a units hint, and returns up to `count` entries (default 10).
    """
    # with-statements close the files; the original leaked every handle.
    with open("/root/myopenaps/settings/profile.json") as profile_file:
        units = json.load(profile_file)['out_units']
    count = request.args.get('count', default=10, type=int)
    # Prefer whichever glucose source was updated most recently.
    if os.path.getmtime("/root/myopenaps/xdrip/glucose.json") > os.path.getmtime("/root/myopenaps/monitor/glucose.json"):
        json_url = "/root/myopenaps/xdrip/glucose.json"
    else:
        json_url = "/root/myopenaps/monitor/glucose.json"
    with open(json_url) as glucose_file:
        data = json.load(glucose_file)
    data[0]['units_hint'] = "mgdl" if units == "mg/dL" else "mmol"
    return jsonify(data[0:count])
def avatar(self, avatar_via, size=500, small_size=100, url=None):
    """Resolve and store the user's avatar URLs for provider *avatar_via*.

    Builds a provider-specific URL for both the full-size and small avatar,
    queries Facebook/Google for the actual image metadata (falling back to
    Gravatar when the provider reports a default/silhouette image), and
    stores the results on self.profireader_avatar_url /
    self.profireader_small_avatar_url.

    Returns self to allow chaining.
    """
    # Per-provider URL builders keyed by provider name; each takes the
    # requested pixel size.
    avatar_urls = dict(
        facebook=lambda s: "http://graph.facebook.com/{facebook_id}/picture?width={size}&"
                           "height={size}&redirect=0".format(facebook_id=self.facebook_id, size=s),
        google=lambda s: "https://www.googleapis.com/plus/v1/people/{google_id}?"
                         "fields=image&key={key}".format(google_id=self.google_id, size=s,
                                                         key=Config.GOOGLE_API_KEY_SIMPLE),
        # u=url binds the current argument value at definition time.
        linkedin=lambda s, u=url: u if u else self.gravatar(size=s),
        gravatar=lambda s: self.gravatar(size=s),
        microsoft=lambda _: "https://apis.live.net/v5.0/{microsoft_id}/picture".format(
            microsoft_id=self.microsoft_id
        ),
    )
    url = avatar_urls[avatar_via](size)
    url_small = avatar_urls[avatar_via](small_size)
    if avatar_via == "facebook":
        avatar = json.load(req.urlopen(url=url))
        avatar_small = json.load(req.urlopen(url=url_small))
        # Facebook marks placeholder avatars with is_silhouette.
        if avatar["data"].get("is_silhouette"):
            self.profireader_avatar_url = self.gravatar(size=size)
            self.profireader_small_avatar_url = self.gravatar(size=small_size)
        else:
            self.profireader_avatar_url = avatar["data"].get("url")
            self.profireader_small_avatar_url = avatar_small["data"].get("url")
    elif avatar_via == "google":
        avatar = json.load(req.urlopen(url=url))
        avatar_small = json.load(req.urlopen(url=url_small))
        # Google marks placeholder avatars with isDefault.
        if avatar["image"].get("isDefault"):
            self.profireader_avatar_url = self.gravatar(size=size)
            self.profireader_small_avatar_url = self.gravatar(size=small_size)
        else:
            self.profireader_avatar_url = avatar["image"].get("url")
            self.profireader_small_avatar_url = avatar_small["image"].get("url")
    elif avatar_via == "linkedin":
        # LinkedIn: the caller-supplied url is used for both sizes.
        self.profireader_avatar_url = url
        self.profireader_small_avatar_url = url
    elif avatar_via == "microsoft":
        avatar = req.urlopen(url=url)
        # Microsoft redirects default avatars to a URL containing "Default".
        if "Default" not in avatar.url:
            self.profireader_avatar_url = avatar.url
            self.profireader_small_avatar_url = avatar.url
        else:
            self.profireader_avatar_url = self.gravatar(size=size)
            self.profireader_small_avatar_url = self.gravatar(size=small_size)
    elif avatar_via == "gravatar":
        self.profireader_avatar_url = url
        self.profireader_small_avatar_url = url_small
    return self
def readConfig():
    """Load and return the JSON configuration from the module-level
    `config_file` path."""
    # The previous version called close() inside the with-block, which is
    # redundant (the context manager already closes the file) and named the
    # readable handle `out`, which was misleading.
    with open(config_file, mode='r') as config_fp:
        return json.load(config_fp)
def theater(): """ Serves the theater view from the index URL """ # Store scene list on global object g.standard_scenes = STANDARD_SCENES # Render markdown from about ('home.md') file and store on global object with open(os.path.join(PATHS['home'], 'home.md')) as home_file: g.home = Markup(markdown(home_file.read())) # Load project index data structure into global object with open(os.path.join(PATHS['projects'],'project_index.json')) as index_file: g.project_index = json.load(index_file)['project_index'] # Create scenes dict on global object and populate with standard scenes... g.scenes = {} for scene in g.standard_scenes: g.scenes[scene] = Markup(render_template(scene + '.html')) # ...and project scenes for filename in os.listdir(PATHS['projects']): if filename.endswith('.md'): with open(os.path.join(PATHS['projects'], filename)) as project_file: g.scenes[filename.replace('.md', '')] = Markup(markdown(project_file.read())) # Render page return render_template('theater.html')
def pnr_api(pnr):
    """
    Returns the PNR data in JSON after fetching from Indian Railways website.
    """
    if is_pnr_dummy(pnr):
        # with-statement closes the file; the original leaked the handle.
        with open("dummy_response.json", "r") as dummy_file:
            return jsonify(json.load(dummy_file))
    if is_pnr_valid(pnr):
        response = requests.post(BASE_URL, data={PARAM_NAME: pnr})
        # BUG FIX: `status_code is 200` compared object identity, which only
        # works by accident of CPython's small-int caching; `==` is correct.
        if response.status_code == 200:
            pnr_data = parse_html(response.content)
            if not pnr_data:
                return jsonify({'status': 'PNR FLUSHED / SERVICE UNAVAILABLE',
                                'data': {}})
            return jsonify({'status': 'OK',
                            'data': build_response_dict(pnr_data)})
        else:
            return jsonify({'status': 'ERROR', 'data': {}})
    else:
        return jsonify({'status': 'INVALID PNR', 'data': {}})
def predict_appliance(home, appliance, feature):
    """Predict per-month appliance usage for *home* via k-nearest neighbours.

    Loads the tuned (feature set f, k) for this appliance/feature combination,
    fits a NearestNeighbors model on all other homes, and returns a list of 12
    averaged monthly readings from the k neighbours.
    """
    if home in all_homes[appliance]:
        home_to_pick = home
    else:
        home_to_pick = all_homes[appliance][0]
    print(home_to_pick)
    # with-statement closes the file; the original leaked the handle.
    with open("../data/output/sensitivity-numfeatures-allhomes/%s_%s_%d.json"
              % (appliance, feature, home_to_pick), "r") as fh:
        feature_dict = json.load(fh)
    f = feature_dict['f']
    k = feature_dict['k']
    # (removed an unused KNeighborsRegressor that was constructed but never fit)
    nn = NearestNeighbors(n_neighbors=k)
    df_new = df.copy()
    df_new = df_new.ix[all_homes[appliance]]
    # Exclude the target home itself from the neighbour pool.
    df_new = df_new.ix[~df_new.index.isin([home])]
    nn.fit(df_new[f].dropna())
    distances, indices = nn.kneighbors(df.ix[home][f])
    out = []
    nghbrs_list = df_new.index[indices].values[0]
    for month in range(1, 13):
        # BUG FIX: was `len(nghbrs_list>1)` — the length of a boolean array,
        # which is truthy for any non-empty neighbour list.  The intent is to
        # sum over neighbours only when there is more than one.
        if len(nghbrs_list) > 1:
            out.append(df_new[["%s_%d" % (appliance, month)]].ix[nghbrs_list].sum().values[0] / k)
        else:
            out.append(df_new[["%s_%d" % (appliance, month)]].ix[nghbrs_list].values[0] / k)
    return out
def load_config(self):
    """Load ISYEcho.json from the config directory into instance attributes.

    Missing or unreadable config yields an empty dict, so every attribute
    falls back to its default ('' or None).
    """
    try:
        config_fp = open(os.path.join(self._config_path, 'ISYEcho.json'))
        config = json.load(config_fp)
        config_fp.close()
    except IOError:
        # No config file yet: run with defaults.
        config = {}
    # SecretKey is stored base64-encoded; None means "not configured".
    secret_key = config.get('SecretKey')
    if secret_key is not None:
        self._secret_key = base64.b64decode(secret_key)
    else:
        self._secret_key = None
    self._Isy_Url = config.get('IsyUrl', '')
    #if self._Isy_Url == '':
    #    from ISY.IsyDiscover import isy_discover
    #    result = isy_discover(timeout=30, count=1)
    #    if len(result) == 1:
    #        import urlparse
    #        self._Isy_Url = urlparse.urlparse(result.values()[0]['URLBase']).netloc
    self._Isy_User = config.get('IsyUser', '')
    self._Isy_Pass = config.get('IsyPass', '')
    self.username = config.get('Username', '')
    self.password = config.get('Password', '')
    self.amazon_email = config.get('AmazonEmail', '')
    # Instantiate a Light per configured entry, keyed by its address.
    for light_cfg in config.get('lights', {}):
        light = Light.Light(settings=light_cfg)
        self._lights[light.address] = light
def login_signin():
    """Check the submitted credentials and render the monitor page on success."""
    form = request.form
    credentials_ok = (form['username'] == monitor_cfg.config_user
                      and form['password'] == monitor_cfg.config_passwd)
    if not credentials_ok:
        return '<h1 align="center">账号或密码错误!</h1>'
    with open('static/ip.json', 'r') as host_file:
        host_json = json.load(host_file)
    return render_template(
        "monitor.html",
        title=monitor_cfg.config_webtitle,
        data=json.dumps(host_json, ensure_ascii=False),
    )
def login():
    """
    Login handler.
    Check password from user file and add <username> to session storage.
    """
    if request.method == 'POST':
        username = escape(request.form.get('username', None))
        password = request.form.get('password', None)
        # Where to go after a successful login (defaults to the main page).
        next_url = request.args.get('next', url_for('main'))
        if not username or not password:
            flash('Fill all fields!', 'error')
            return redirect(url_for('login', next=next_url))
        try:
            # safe_join prevents path traversal via the submitted username.
            user_file = safe_join(app.config['USERS_FOLDER'], username)
            with open(user_file, 'r') as uf:
                user_conf = load(uf)  # user_file on json format
            if not sha256_crypt.verify(password, user_conf['password']):  # check password
                flash('Wrong password!', 'error')
                return redirect(url_for('login', next=next_url))
            else:
                flash('You successfully logged in!', 'info')
                session['username'] = username
                settings_write(username, 'last_login', int(time.time()))
        except FileNotFoundError:
            flash('User not exist!', 'error')
            return redirect(url_for('login', next=next_url))
        except Exception:
            # Anything else (bad JSON, missing key, ...) is a server error.
            abort(500)
        return redirect(next_url)
    return render_template('login.html')
def terms_and_conditions():
    """Return the terms-and-conditions text as a JSON response.

    Any failure (missing file, bad JSON) is delegated to the 404 handler,
    preserving the original best-effort behaviour.
    """
    try:
        # with-statement closes the file; the original leaked the handle.
        with open("app/static/text/terms_and_conditions.json", "r") as fp:
            data = json.load(fp)
        return jsonify(data)
    except Exception as e:
        # NOTE(review): broad catch kept deliberately — any error is mapped
        # to the not-found page rather than a 500.
        return page_not_found(e)
def get(self, url):
    """Return a mock response for *url*.

    Scans self.url_matches — (compiled_pattern, fixture_filename) pairs — and
    returns the first matching fixture's parsed JSON wrapped in a
    SimpleNamespace; an empty payload when nothing matches.
    """
    for pattern, fixture_name in self.url_matches:
        if pattern.fullmatch(url):
            # with-statement closes the fixture file; the original leaked
            # the handle.
            fixture_path = os.path.join(app.root_path,
                                        'mock/fixtures/' + fixture_name)
            with open(fixture_path) as fixture_file:
                return SimpleNamespace(data=json.load(fixture_file))
    return SimpleNamespace(data={})
def __init__(self):
    """Load todo.json and flash a string of all its top-level keys."""
    with open('todo.json', 'rb') as fp:
        todoList = dict(json.load(fp))
    # ''.join replaces the quadratic repeated string concatenation of the
    # original per-key loop; key order is unchanged.
    stringList = ''.join(todoList.keys())
    flask.flash(stringList)
def SearchRepoName(repo_name):
    """Look up the redirect channel configured for *repo_name*.

    Returns the mapped channel from redirectChannel.json, or None when the
    repository is not configured.
    """
    with open('redirectChannel.json', 'r') as mapping_file:
        channel_selector = json.load(mapping_file)
    # dict.get returns None for a missing key, matching the original's
    # explicit membership test.
    return channel_selector.get(repo_name)
def updateInfos():
    """Refresh market data for every symbol in static/sp100.json.

    For each symbol, fetches current quote fields plus hotness/BS scores and
    pushes the snapshot onto that symbol's `data` array in the infos
    collection.
    """
    print("Updating Infos!")
    with open('static/sp100.json', 'rb') as f:
        ls = json.load(f)
    for i in ls:
        # One timestamp per symbol snapshot.
        timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        print (i['name'])
        symbol = Share(i['name'])
        item = {
            'name': i['name'],
            'price': symbol.get_price(),
            'time': timestamp,
            'prev_close': symbol.get_prev_close(),
            'open': symbol.get_open(),
            'volume': symbol.get_volume(),
            'pe': symbol.get_price_earnings_ratio(),
            'eps': symbol.get_earnings_share(),
            'price_sales': symbol.get_price_sales(),
            'ebitda': symbol.get_ebitda(),
            'hotness': ms.hotness_function(i['name']),
            'BS': ms.bs_function(i['name'])}
        # Append the snapshot to the symbol's history.
        db.infos.update(
            {"name": i['name']},
            {"$push": {"data": item}}
        )
    print('Collection Infos Updated.')
    return Response('Collection Infos Updated.')
def capacity():
    """Render and show capacity page.

    Joins the static per-group capacity config with the latest client counts
    from the database and renders each group's fullness percentage.
    """
    # Read capacity of groups from json file; with-statement closes the
    # handle (the original leaked it).
    with open('data/capacity_group.json') as json_data:
        cap_data = json.load(json_data)['data']
    # Read current data
    cur_data = db.get_latest_data(g.cursor)
    locations = []
    # Loop to find corresponding cur_client_count with capacity
    # and store it in locations
    for cap in cap_data:
        group_name = cap['group_name']
        group_capacity = cap['capacity']
        # BUG FIX: reset per group.  Previously a group with no live reading
        # silently reused the count from the previous iteration (or raised
        # NameError for the very first group).
        cur_client_count = None
        for latest in cur_data:
            if latest['group_name'] == group_name:
                cur_client_count = latest['client_count']
                break
        if cur_client_count is None:
            # No live reading for this group — skip rather than show stale data.
            continue
        # Cast one of the numbers into a float, get a percentile by
        # multiplying 100, round the percentage and cast it back into an int.
        percent_full = int(round(float(cur_client_count) / group_capacity * 100))
        locations.append({"name": group_name, "fullness": percent_full})
    return render_template('capacity.html', locations=locations)
def createInfos():
    """Seed the infos collection from static/sp100.json when it is empty.

    Each symbol gets one document with its name, sector, and a `data` array
    holding the first market snapshot.  No-op when documents already exist.
    """
    if db.infos.count() == 0:
        print("Creating Infos!!")
        with open('static/sp100.json', 'rb') as f:
            ls = json.load(f)
        for i in ls:
            timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            symbol = Share(i['name'])
            item = {
                'name': i['name'],
                'price': symbol.get_price(),
                'time': timestamp,
                'prev_close': symbol.get_prev_close(),
                'open': symbol.get_open(),
                'volume': symbol.get_volume(),
                'pe': symbol.get_price_earnings_ratio(),
                'eps': symbol.get_earnings_share(),
                'price_sales': symbol.get_price_sales(),
                'ebitda': symbol.get_ebitda(),
                'hotness': ms.hotness_function(i['name']),
                'BS': ms.bs_function(i['name'])}
            db.infos.insert_one({
                "name": i['name'],
                "sector": i['sector'],
                "data": [item]
            })
    print('Collection Infos Created.')
    return Response('Collection Infos Created.')
def __init__(self, path):
    """Load a theme from *path*, reading its metadata from info.json."""
    #: The theme's root path. All the files in the theme are under this
    #: path.
    self.path = os.path.abspath(path)
    with open(os.path.join(self.path, "info.json")) as fd:
        self.info = i = json.load(fd)
    #: The theme's name, as given in info.json. This is the human
    #: readable name.
    self.name = i["name"]
    #: The application identifier given in the theme's info.json. Your
    #: application will probably want to validate it.
    self.application = i["application"]
    #: The theme's identifier. This is an actual Python identifier,
    #: and in most situations should match the name of the directory the
    #: theme is in.
    self.identifier = i["identifier"]
    #: The human readable description. This is the default (English)
    #: version.
    self.description = i.get("description")
    #: This is a dictionary of localized versions of the description.
    #: The language codes are all lowercase, and the ``en`` key is
    #: preloaded with the base description.
    self.localized_desc = dict(
        (k.split("_", 1)[1].lower(), v) for k, v in i.items()
        if k.startswith("description_")
    )
    self.localized_desc.setdefault("en", self.description)
    #: The author's name, as given in info.json. This may or may not
    #: include their email, so it's best just to display it as-is.
    self.author = i["author"]
    #: A short phrase describing the license, like "GPL", "BSD", "Public
    #: Domain", or "Creative Commons BY-SA 3.0".
    self.license = i.get("license")
    #: A URL pointing to the license text online.
    self.license_url = i.get("license_url")
    #: The URL to the theme's or author's Web site.
    self.website = i.get("website")
    #: The theme's preview image, within the static folder.
    self.preview = i.get("preview")
    #: The theme's doctype. This can be ``html4``, ``html5``, or ``xhtml``
    #: with html5 being the default if not specified.
    self.doctype = i.get("doctype", "html5")
    #: The theme's version string.
    self.version = i.get("version")
    #: Any additional options. These are entirely application-specific,
    #: and may determine other aspects of the application's behavior.
    self.options = i.get("options", {})
def LoadFile(self):
    """Load the passive state from FileLocation (if present) into
    self.CurrentPassive."""
    # Parenthesized print works identically on Python 2 and 3 for a single
    # argument; the original used the Python-2-only print statement.
    print("Loading passive " + FileLocation)
    if os.path.isfile(FileLocation):
        # with-statement closes the file even if json.load raises; the
        # original leaked the handle on the exception path.
        with open(FileLocation, "r") as filePointer:
            self.CurrentPassive = Passive(json.load(filePointer))
def get_component_name(name, file_name):
    """Return the component named *name* from "<file_name>.json" as JSON.

    Aborts with 404 when no component in the file's "components" list has a
    matching 'name'.
    """
    # with-statement closes the file; the original leaked the handle.
    with open(str(file_name) + '.json', 'r') as jfile:
        json_file = json.load(jfile)
    matches = [component for component in json_file["components"]
               if component['name'] == name]
    if not matches:
        abort(404)
    return jsonify({'component': matches[0]})
def checkcrime(lat, lon, radius):
    """Summarize crimes near (lat, lon) within *radius* from the SpotCrime API.

    Yields a single OrderedDict with: total crime count, the three streets
    with the most incidents, per-crime-type counts, and counts bucketed into
    eight 3-hour windows of the day.

    NOTE: this is a generator (single ``yield``), so callers must iterate it.
    Python 2 code: uses ``dict.has_key`` and ``urllib2``.
    """
    command = "https://api.spotcrime.com/crimes.json?lat=%s&lon=%s&radius=%s&key=." % (lat, lon, radius)
    response = urllib2.urlopen(command)
    data = json.load(response)
    listdata = data.get("crimes")
    total_crime = len(listdata)
    # Incident counts bucketed into eight 3-hour windows.
    event_time_count = {
        "12:01am-3am": 0,
        "3:01am-6am": 0,
        "6:01am-9am": 0,
        "9:01am-12noon": 0,
        "12:01pm-3pm": 0,
        "3:01pm-6pm": 0,
        "6:01pm-9pm": 0,
        "9:01pm-12midnight": 0
    }
    crime_type = {}
    streets = {}
    for item in listdata:
        for key in item:
            if key == 'type':
                # Tally incidents per crime type.
                if crime_type.has_key(item[key]):
                    crime_type[item[key]] = crime_type[item[key]] + 1
                else:
                    crime_type[str(item[key])] = 1
            elif key == 'date':
                # Dates look like "<date> <hh>:<rest AM|PM>"; split once on the
                # first space, then on ':' to get the hour and the remainder.
                # The "<hh>==HH and rest=='00 XM'" checks put exact boundary
                # times (e.g. 3:00) into the earlier bucket.
                ls = item[key].split(' ', 1)
                if "AM" in ls[1]:
                    hours = ls[1].split(':', 1)
                    if (hours[0] == '12' and hours[1] != '00 AM') or hours[0] == '01' or hours[0] == '02' or (hours[0] == '03' and hours[1] == '00 AM'):
                        event_time_count["12:01am-3am"] = event_time_count["12:01am-3am"] + 1
                    elif (hours[0] == '03' and hours[1] != '00 AM') or hours[0] == '04' or hours[0] == '05' or (hours[0] == '06' and hours[1] == '00 AM'):
                        event_time_count["3:01am-6am"] = event_time_count["3:01am-6am"] + 1
                    elif (hours[0] == '06' and hours[1] != '00 AM') or hours[0] == '07' or hours[0] == '08' or (hours[0] == '09' and hours[1] == '00 AM'):
                        event_time_count["6:01am-9am"] = event_time_count["6:01am-9am"] + 1
                    elif (hours[0] == '09' and hours[1] != '00 AM') or hours[0] == '10' or hours[0] == '11':
                        event_time_count["9:01am-12noon"] = event_time_count["9:01am-12noon"] + 1
                    elif hours[0] == '12' and hours[1] == '00 AM':
                        # Midnight belongs to the end of the previous day.
                        event_time_count["9:01pm-12midnight"] = event_time_count["9:01pm-12midnight"] + 1
                else:
                    hours = ls[1].split(':', 1)
                    if (hours[0] == '12' and hours[1] != '00 PM') or hours[0] == '01' or hours[0] == '02' or (hours[0] == '03' and hours[1] == '00 PM'):
                        event_time_count["12:01pm-3pm"] = event_time_count["12:01pm-3pm"] + 1
                    elif (hours[0] == '03' and hours[1] != '00 PM') or hours[0] == '04' or hours[0] == '05' or (hours[0] == '06' and hours[1] == '00 PM'):
                        event_time_count["3:01pm-6pm"] = event_time_count["3:01pm-6pm"] + 1
                    elif (hours[0] == '06' and hours[1] != '00 PM') or hours[0] == '07' or hours[0] == '08' or (hours[0] == '09' and hours[1] == '00 PM'):
                        event_time_count["6:01pm-9pm"] = event_time_count["6:01pm-9pm"] + 1
                    elif (hours[0] == '09' and hours[1] != '00 PM') or hours[0] == '10' or hours[0] == '11':
                        event_time_count["9:01pm-12midnight"] = event_time_count["9:01pm-12midnight"] + 1
                    elif hours[0] == '12' and hours[1] == '00 PM':
                        # Noon belongs to the end of the morning.
                        event_time_count["9:01am-12noon"] = event_time_count["9:01am-12noon"] + 1
            elif key == 'address':
                # Count incidents per street; addresses are either a single
                # street ("100 BLOCK OF MAIN ST") or an intersection of two
                # ("A AND B" / "A & B"), each spelled with its own separator.
                item[key] = str(item[key])
                if " AND " in item[key]:
                    st = item[key].split(' AND ', 1)
                    if streets.has_key(st[0]):
                        streets[st[0]] = streets[st[0]] + 1
                    else:
                        streets[st[0]] = 1
                    if streets.has_key(st[1]):
                        streets[st[1]] = streets[st[1]] + 1
                    else:
                        streets[st[1]] = 1
                elif " BLOCK BLOCK " in item[key]:
                    st = item[key].split(' BLOCK BLOCK ', 1)
                    if streets.has_key(st[1]):
                        streets[st[1]] = streets[st[1]] + 1
                    else:
                        streets[st[1]] = 1
                elif " BLOCK OF " in item[key]:
                    st = item[key].split(' BLOCK OF ', 1)
                    if streets.has_key(st[1]):
                        streets[st[1]] = streets[st[1]] + 1
                    else:
                        streets[st[1]] = 1
                elif " BLOCK " in item[key]:
                    st = item[key].split(' BLOCK ', 1)
                    if streets.has_key(st[1]):
                        streets[st[1]] = streets[st[1]] + 1
                    else:
                        streets[st[1]] = 1
                elif " & " in item[key]:
                    st = item[key].split(' & ')
                    if streets.has_key(st[1]):
                        streets[st[1]] = streets[st[1]] + 1
                    else:
                        streets[st[1]] = 1
                    if streets.has_key(st[0]):
                        streets[st[0]] = streets[st[0]] + 1
                    else:
                        streets[st[0]] = 1
                else:
                    if streets.has_key(item[key]):
                        streets[item[key]] = streets[item[key]] + 1
                    else:
                        streets[item[key]] = 1
    # Streets sorted by incident count ascending; the top three are taken
    # from the tail.
    sorted_x = sorted(streets.items(), key=operator.itemgetter(1))
    n = len(sorted_x)
    the_most_dangerous_streets = []
    for i in range(1, 4, 1):
        # NOTE(review): sorted_x[n-i] is a (street, count) tuple so it can
        # never equal 0 — the condition is always true; and this raises
        # IndexError when fewer than three streets were seen.  Confirm
        # intended behavior before relying on this.
        if sorted_x[n - i] != 0:
            the_most_dangerous_streets.append(sorted_x[n - i][0])
    key_value_pairs = [("total_crime", total_crime), ("the_most_dangerous_streets", the_most_dangerous_streets), ("crime_type_count", crime_type), ("event_time_count", event_time_count)]
    result = collections.OrderedDict(key_value_pairs)
    yield result
def loadData():
    """Load and return static/books.json from the directory next to this
    module."""
    site_root = os.path.realpath(os.path.dirname(__file__))
    json_url = os.path.join(site_root, "static", "books.json")
    # with-statement closes the file; the original leaked the handle.
    with open(json_url) as book_file:
        return json.load(book_file)
def get_sortinghat_question():
    """Serve the sorting-hat question bank as a JSON response."""
    question_path = 'app/sh_questions.json'
    with open(question_path) as question_file:
        payload = json.load(question_file)
    return json.jsonify(payload)
def new_player():
    """Return the default new-player payload wrapped in a status envelope."""
    with open("server/data/new_player.json", "r") as template_file:
        template = json.load(template_file)
    payload = {
        'status': 'success',
        'new_player': template['new_player'],
    }
    return jsonify(payload)
from flask import json, jsonify, request
from . import product_api_blueprint
from models import db, Product

# Product catalogue loaded once at import time.
# NOTE(review): `data` is not referenced by the routes below — confirm it is
# used elsewhere before assuming it is needed here.
with open('database/products.json') as f:
    data = json.load(f)


@product_api_blueprint.route('/api/products', methods=['GET'])
def products():
    """List every product as a JSON array."""
    items = []
    for row in Product.query.all():
        items.append(row.to_json())
    response = jsonify(items)
    return response


@product_api_blueprint.route('/api/product/<slug>', methods=['GET'])
def product(slug):
    """Return the product identified by *slug*, or a 404 JSON message."""
    item = Product.query.filter_by(slug=slug).first()
    if item is not None:
        response = jsonify({'result': item.to_json()})
    else:
        # (body, status) tuple — Flask unpacks it into a 404 response.
        response = jsonify({'message': 'Cannot find product'}), 404
    return response
def homepage():
    """Render the main page with the movie data from static/movies.json."""
    project_root = os.path.realpath(os.path.dirname(__file__))
    json_url = os.path.join(project_root, 'static/movies.json')
    # with-statement closes the file; the original leaked the handle.
    with open(json_url) as movie_file:
        data = json.load(movie_file)
    return render_template("main.html", data=data)
"china well": date(2016, 11, 12), "clinton blames comey": date(2016, 11, 12), "clinton clear": date(2016, 11, 7), "consulate attack": date(2016, 11, 10), "toberone-gate": date(2016, 11, 8), "tram": date(2016, 11, 9), "tram victims": date(2016, 11, 12) } # make it json-able for event_date in event_dates: event_dates[event_date] = event_dates[event_date].isoformat() for filepath in glob.glob(os.path.dirname(__file__) + '/../data/*.json'): with open(filepath, encoding='utf-8') as data_file: data = json.load(data_file) if data.get('category') is not None: annotated = deep_access(data, 'fiveWoneH.when.annotated') if annotated is not None: for anno in annotated: anno['parsed'] = event_dates[data['category']] # save it back with open(filepath, 'w', encoding='utf8') as outfile: outfile.write(json.dumps(data, indent=2, sort_keys=True)) outfile.close() print('done')
def renderbook():
    """Serve the bundled books.json from the app's static folder as JSON."""
    book_path = os.path.join(app.static_folder, 'books.json')
    with open(book_path) as book_file:
        payload = json.load(book_file)
    return jsonify(payload)
def showjson():
    """Render the showjson template with static/blockchain.json's content."""
    site_root = os.path.realpath(os.path.dirname(__file__))
    json_url = os.path.join(site_root, "static", "blockchain.json")
    # with-statement closes the file; the original leaked the handle.
    with open(json_url) as chain_file:
        data = json.load(chain_file)
    return render_template('showjson.jade', data=data)
app = Flask(__name__) app.config['SECRET_KEY'] = 'the quick brown fox jumps over the lazy dog' app.config['CORS_HEADERS'] = 'Content-Type' cors = CORS(app, resources={r"*": {"origins": "*"}}) # w3 = Web3(Web3.HTTPProvider("http://127.0.0.1:7545")) w3 = Web3(Web3.HTTPProvider("http://*****:*****@app.route("/demo1/<string:gg>", methods=['POST']) def demotw(gg): user = w3.eth.contract(address=contract_address, abi=abi) tx_hash = user.functions.setMessage(gg).transact() print(tx_hash); return jsonify({"data": "Raja"}), 200
def send_json():
    """Load static/data/example.json and return the parsed object."""
    json_url = os.path.join("static/data", "example.json")
    # with-statement closes the file; the original leaked the handle.
    with open(json_url) as example_file:
        return json.load(example_file)
logging.basicConfig(level=logging.DEBUG)
application = Flask(__name__)

#######################
# Flask Restless Setup
#######################

# Allow us to test locally if desired: the database target is chosen from
# the command-line arguments, defaulting to the admin remote.
if 'docker' in sys.argv:
    database_choice = 'docker_database'
elif 'remote' in sys.argv:
    database_choice = 'remote_database'
else:
    database_choice = 'codefordc_remote_admin'

# Connection strings live outside version control in secrets.json,
# keyed by database choice.
with open('housinginsights/secrets.json') as f:
    secrets = json.load(f)
    connect_str = secrets[database_choice]['connect_str']

logging.info("Connecting to database {}".format(database_choice))
application.config['SQLALCHEMY_DATABASE_URI'] = connect_str
application.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(application)
# Reflect the existing schema into automapped ORM classes.
Base = automap_base()
metadata = MetaData(bind=db)
Base.prepare(db.engine, reflect=True)
db.session.commit()
from flask import json

# Elasticsearch-style mapping for the services doctype, loaded once at import.
with open("mappings/services.json") as services:
    SERVICES_MAPPING = json.load(services)

# Property names drive the two field lists below.
_props = SERVICES_MAPPING['mappings']['services']['properties'].keys()

# Filterable fields, with the 'filter_' marker stripped.
FILTER_FIELDS = sorted(
    name.replace('filter_', '')
    for name in _props
    if name.startswith('filter_')
)

# Everything else is treated as a full-text field.
TEXT_FIELDS = sorted(
    name
    for name in _props
    if not name.startswith('filter_')
)
def get_plugin_data_extend(self):
    """Load and return the extended plugin database.

    Reads the JSON file whose path is configured at ``self.c.file_db_extend``
    and returns the deserialized content.
    """
    # Context manager guarantees the handle is closed even when json.load
    # raises — the original open()/close() pair leaked on error.
    with open(self.c.file_db_extend) as json_data_extend:
        return json.load(json_data_extend)
def response():
    """Run the neural-network pipeline, then render its JSON output.

    Executes ``neuronal_network/main.py`` (which is expected to refresh
    ``static/data/example.json``) and renders the result.
    """
    # NOTE(review): shelling out with a hard-coded interpreter/path; consider
    # subprocess.run([...]) and checking the exit status.
    os.system('python3 ./neuronal_network/main.py')
    json_url = os.path.join("static/data", "example.json")
    # Close the handle deterministically — json.load(open(...)) leaked it.
    with open(json_url) as json_file:
        data = json.load(json_file)
    return render_template('response.html', data=data)
def get_data(filepath: str) -> list:
    """Deserialize and return the JSON document stored at *filepath*."""
    with open(filepath, 'r', encoding='utf-8') as handle:
        payload = json.load(handle)
    return payload
def check_auth(email, password):
    """Check *email*/*password* against the credentials in ``users.json``.

    Returns True only when both values match the stored ones.
    NOTE(review): plaintext password comparison — consider hashed storage.
    """
    # Context manager closes the handle — json.load(open(...)) leaked it.
    with open('users.json') as users_file:
        user_data = json.load(users_file)
    return (email == user_data.get('email')
            and password == user_data.get('password'))
import os
import time
from flask import Flask, render_template, url_for, session, request, json, redirect, flash
from operator import itemgetter

app = Flask(__name__)
# NOTE(review): the fallback secret literal looks like a paste accident
# ("...git pu") — confirm the intended default value.
app.secret_key = os.getenv("SECRET", "not a secretgit pu")

# Maximum wrong answers allowed per question.
max_attempts = 3

# Quiz questions are loaded once at import time.
with open("data/questions.json") as questions_file:
    questions = json.load(questions_file)


@app.route('/')
def index():
    """Landing page."""
    return render_template('index.html')


@app.route('/start_game', methods=['POST'])
def start_game():
    """Initialise per-player session state for a fresh game, then redirect."""
    session['username'] = request.form['username']
    session['score'] = 0
    session['question_number'] = 0
    session['attempts_left'] = max_attempts
    session['start_time'] = time.time()
    return redirect('/game')


@app.route('/game_over')
def game_over():
    """Final screen shown when the game ends."""
    return render_template('game_over.html')
def print_pdftk(stamp_print):
    """Build the F3L PDF (summary page + SA/SB schedules) for an uploaded JSON filing.

    stamp_print: when equal to 'stamp' the filing timestamp / image-number
    fields are kept for stamping; otherwise they are blanked out.
    On success responds with the S3 URL of the generated 'all_pages.pdf';
    when no 'json_file' part is present, responds with a 400 envelope.
    NOTE(review): indentation below was reconstructed from a flattened
    source — confirm block nesting against version history.
    """
    # check if json_file is in the request
    # try:
    if 'json_file' in request.files:
        total_no_of_pages = 0
        page_no = 1
        has_sa_schedules = has_sb_schedules = False
        json_file = request.files.get('json_file')
        # generate md5 for json file
        # FIXME: check if PDF already exist with md5, if exist return pdf instead of re-generating PDF file.
        json_file_md5 = utils.md5_for_file(json_file)
        json_file.stream.seek(0)
        md5_directory = current_app.config['OUTPUT_DIR_LOCATION'].format(json_file_md5)
        os.makedirs(md5_directory, exist_ok=True)
        infile = current_app.config['FORM_TEMPLATES_LOCATION'].format('F3L')
        # save json file as md5 file name
        json_file.save(current_app.config['REQUEST_FILE_LOCATION'].format(json_file_md5))
        outfile = md5_directory + json_file_md5 + '_temp.pdf'
        # load json file
        # NOTE(review): this open() handle is never closed; consider `with`.
        f3l_json = json.load(open(current_app.config['REQUEST_FILE_LOCATION'].format(json_file_md5)))
        # setting timestamp and imgno to empty as these needs to show up after submission
        output = {}
        if stamp_print != 'stamp':
            output['FILING_TIMESTAMP'] = ''
            output['IMGNO'] = ''
        # read data from json file
        f3l_data = f3l_json['data']
        # check if summary is present in fecDataFile
        f3l_summary = []
        if 'summary' in f3l_data:
            f3l_summary = f3l_data['summary']
        # split coverage start date and coverage end date to set month, day, and year
        if f3l_data['coverageStartDate'] and len(f3l_data['coverageStartDate']) > 0:
            coverage_start_date_array = f3l_data['coverageStartDate'].split("/")
            f3l_data['coverageStartDateMonth'] = coverage_start_date_array[0]
            f3l_data['coverageStartDateDay'] = coverage_start_date_array[1]
            f3l_data['coverageStartDateYear'] = coverage_start_date_array[2]
        if f3l_data['coverageEndDate'] and len(f3l_data['coverageEndDate']) > 0:
            coverage_end_date_array = f3l_data['coverageEndDate'].split("/")
            f3l_data['coverageEndDateMonth'] = coverage_end_date_array[0]
            f3l_data['coverageEndDateDay'] = coverage_end_date_array[1]
            f3l_data['coverageEndDateYear'] = coverage_end_date_array[2]
        # checking for signed date, it is only available for submitted reports
        if f3l_data['date_signed'] and len(f3l_data['date_signed']) > 0:
            date_signed_array = f3l_data['date_signed'].split("-")
            f3l_data['dateSignedMonth'] = date_signed_array[0]
            f3l_data['dateSignedDay'] = date_signed_array[1]
            f3l_data['dateSignedYear'] = date_signed_array[2]
        # build treasurer name to map it to PDF template
        treasurer_full_name = []
        treasurer_full_name.append(f3l_data['treasurerLastName'])
        treasurer_full_name.append(f3l_data['treasurerFirstName'])
        treasurer_full_name.append(f3l_data['treasurerMiddleName'])
        treasurer_full_name.append(f3l_data['treasurerPrefix'])
        treasurer_full_name.append(f3l_data['treasurerSuffix'])
        f3l_data['treasurerFullName'] = ",".join(map(str, treasurer_full_name))
        f3l_data['treasurerName'] = f3l_data['treasurerLastName'] + "," + f3l_data['treasurerFirstName']
        f3l_data['efStamp'] = '[Electronically Filed]'
        # checking if json contains summary details, for individual transactions print there wouldn't be summary
        if len(f3l_summary) > 0:
            total_no_of_pages = 1
            f3l_data_summary_array = [f3l_data, f3l_summary]
            if 'memoText' in f3l_data and f3l_data['memoText']:
                total_no_of_pages += 1
        else:
            f3l_data_summary_array = [f3l_data]
        # Merge form data + summary into one flat dict for pdftk form filling.
        f3l_data_summary = {i: j for x in f3l_data_summary_array for i, j in x.items()}
        # process all schedules and build the PDF's
        process_output, total_no_of_pages = process_schedules(f3l_data, md5_directory, total_no_of_pages)
        has_sa_schedules = process_output.get('has_sa_schedules')
        has_sb_schedules = process_output.get('has_sb_schedules')
        if len(f3l_summary) > 0:
            get_summary_detail(f3l_summary, f3l_data, f3l_data_summary)
            f3l_data_summary['PAGESTR'] = "PAGE " + str(page_no) + " / " + str(total_no_of_pages)
            pypdftk.fill_form(infile, f3l_data_summary, outfile)
            shutil.copy(outfile, md5_directory + 'F3L_Summary.pdf')
            os.remove(md5_directory + json_file_md5 + '_temp.pdf')
            # Memo text changes
            if 'memoText' in f3l_data_summary and f3l_data_summary['memoText']:
                # Render the memo text on a TEXT template page and append it
                # to the summary PDF.
                memo_dict = {}
                temp_memo_outfile = md5_directory + 'F3L_Summary_memo.pdf'
                memo_infile = current_app.config['FORM_TEMPLATES_LOCATION'].format('TEXT')
                memo_dict['scheduleName_1'] = 'F3L' + f3l_data_summary['amendmentIndicator']
                memo_dict['memoDescription_1'] = f3l_data_summary['memoText']
                memo_dict['PAGESTR'] = "PAGE " + str(2) + " / " + str(total_no_of_pages)
                pypdftk.fill_form(memo_infile, memo_dict, temp_memo_outfile)
                pypdftk.concat([md5_directory + 'F3L_Summary.pdf', temp_memo_outfile],
                               md5_directory + json_file_md5 + '_temp.pdf')
                shutil.copy(md5_directory + json_file_md5 + '_temp.pdf', md5_directory + 'F3L_Summary.pdf')
                os.remove(md5_directory + json_file_md5 + '_temp.pdf')
            # check if all_pages already exsits
            if os.path.exists(md5_directory + 'all_pages.pdf'):
                os.remove(md5_directory + 'all_pages.pdf')
            # checking for sa transactions
            if has_sa_schedules:
                pypdftk.concat([md5_directory + 'F3L_Summary.pdf', md5_directory + 'SA/all_pages.pdf'],
                               md5_directory + 'all_pages.pdf')
                os.remove(md5_directory + 'SA/all_pages.pdf')
                shutil.rmtree(md5_directory + 'SA')
            else:
                shutil.copy(md5_directory + 'F3L_Summary.pdf', md5_directory + 'all_pages.pdf')
            # checking for sb transactions
            if has_sb_schedules:
                pypdftk.concat([md5_directory + 'all_pages.pdf', md5_directory + 'SB/all_pages.pdf'],
                               md5_directory + 'temp_all_pages.pdf')
                shutil.move(md5_directory + 'temp_all_pages.pdf', md5_directory + 'all_pages.pdf')
                os.remove(md5_directory + 'SB/all_pages.pdf')
                shutil.rmtree(md5_directory + 'SB')
        else:
            # no summary, expecting it to be from individual transactions
            if has_sa_schedules:
                if os.path.exists(md5_directory + 'all_pages.pdf'):
                    os.remove(md5_directory + 'all_pages.pdf')
                    shutil.move(md5_directory + 'SA/all_pages.pdf', md5_directory + 'all_pages.pdf')
                else:
                    shutil.move(md5_directory + 'SA/all_pages.pdf', md5_directory + 'all_pages.pdf')
                shutil.rmtree(md5_directory + 'SA')
            if has_sb_schedules:
                if os.path.exists(md5_directory + 'all_pages.pdf'):
                    os.remove(md5_directory + 'all_pages.pdf')
                    shutil.move(md5_directory + 'SB/all_pages.pdf', md5_directory + 'all_pages.pdf')
                else:
                    shutil.move(md5_directory + 'SB/all_pages.pdf', md5_directory + 'all_pages.pdf')
                shutil.rmtree(md5_directory + 'SB')
        # push output file to AWS
        s3 = boto3.client('s3')
        s3.upload_file(md5_directory + 'all_pages.pdf',
                       current_app.config['AWS_FECFILE_COMPONENTS_BUCKET_NAME'],
                       md5_directory + 'all_pages.pdf',
                       ExtraArgs={'ContentType': "application/pdf", 'ACL': "public-read"})
        response = {
            # 'file_name': '{}.pdf'.format(json_file_md5),
            'pdf_url': current_app.config['PRINT_OUTPUT_FILE_URL'].format(json_file_md5) + 'all_pages.pdf'
        }
        # return response
        if flask.request.method == "POST":
            envelope = common.get_return_envelope(
                data=response
            )
            status_code = status.HTTP_201_CREATED
            return flask.jsonify(**envelope), status_code
    else:
        if flask.request.method == "POST":
            envelope = common.get_return_envelope(
                'false', 'JSON file is missing from your request'
            )
            status_code = status.HTTP_400_BAD_REQUEST
            return flask.jsonify(**envelope), status_code
app = Flask(__name__) app.secret_key = 'thisIsTheSecretKeyAYYYY' app.config['SESSION_TYPE'] = 'filesystem' app.config['TESTING'] = True app.config['TEMPLATES_AUTO_RELOAD'] = True times = { 'short_term': 'the Last Month', 'medium_term': 'the Last 6 Months', 'long_term': 'All Time' } SPOTIFY_AUTH_URL = "https://accounts.spotify.com/authorize" CLIENT = json.load(open('conf.json', 'r+')) CLIENT_ID = CLIENT['id'] CLIENT_SECRET = CLIENT['secret'] SCOPE = "user-read-private user-top-read playlist-modify-public playlist-modify-private user-read-email playlist-read-private" REDIRECT_URI = CLIENT['redirect_uri'] # REDIRECT_URI = 'http://myspotstats.herokuapp.com/callback' # uncomment for heroku production # REDIRECT_URI = 'http://www.myspotstats.com/callback' # uncomment for live domain @app.route('/') def home(): """ Home page, redirects to other pages based on user input """ if 'auth_header' in session:
def get(self, country_name):
    """Return the ``Master_Data`` section of ``<country_name>.json`` as JSON.

    Reads the country file from the configured DATA_PATH and responds 200.
    """
    # Context manager closes the file — the original handle was never closed.
    with open(config['DATA_PATH'] + '/' + country_name + '.json', 'r') as f:
        country = json.load(f)
    # NOTE(review): unicode() exists only on Python 2; on Python 3 use the
    # plain string key 'Master_Data'.
    response = jsonify(master_data=country[unicode('Master_Data')])
    response.status_code = 200
    return response
# import compare_face_cloud from flask import Flask, request, redirect, session, url_for, Response, json, render_template, send_from_directory from werkzeug.utils import secure_filename from flask.json import jsonify from pymongo import MongoClient from flask_cors import CORS from google.cloud import datastore from google.cloud import vision from google.cloud import storage import os import recognizerimage # import faceutils with open('credentials.json', 'r') as f: creds = json.load(f) mongostr = creds["mongostr"] client = MongoClient(mongostr) db = client["remind"] ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'} app = Flask(__name__) app.config.from_object(__name__) CORS(app) def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
jinja_options = Flask.jinja_options.copy() jinja_options.update( dict(block_start_string='{%', block_end_string='%}', variable_start_string='((', variable_end_string='))', comment_start_string='{#', comment_end_string='#}')) ROOT_PATH = os.path.dirname(os.path.abspath(__file__)) STATIC_PATH = os.path.join(ROOT_PATH, 'client/dist') app = VueFlask(__name__, static_folder=STATIC_PATH, static_url_path='') with open('./mysql.json') as credential: cred = json.load(credential) def connection(): return pymysql.connect(host='localhost', user='******', password=cred['password'], charset='utf8mb4', db='todolist', cursorclass=pymysql.cursors.DictCursor) def dataList(): conn = connection() try: with conn.cursor() as cursor:
def get(self, country_name):
    """Return the ``Description`` section of ``<country_name>.json`` as JSON.

    Reads the country file from the configured DATA_PATH and responds 200.
    """
    # Context manager closes the file — the original handle was never closed.
    with open(config['DATA_PATH'] + '/' + country_name + '.json', 'r') as f:
        country = json.load(f)
    # NOTE(review): unicode() exists only on Python 2; on Python 3 use the
    # plain string key 'Description'.
    response = jsonify(description=country[unicode('Description')])
    response.status_code = 200
    return response
def get_course_information(course_code):
    """Serve the canned course-information fixture as JSON with HTTP 200."""
    fixture_path = DATABASE + "/course_information_succeed.json"
    with open(fixture_path) as fixture:
        payload = json.load(fixture)
    return jsonify(payload), 200
def get_courses():
    """Serve the courses fixture as JSON with HTTP 200."""
    with open(DATABASE + "/courses.json") as fixture:
        payload = json.load(fixture)
    return jsonify(payload), 200
def albums_json():
    """Return the album catalogue stored under static/data/albums.json."""
    catalogue_path = os.path.join(app.static_folder, 'data', 'albums.json')
    with open(catalogue_path, 'r') as catalogue_file:
        catalogue = json.load(catalogue_file)
    return jsonify(catalogue)
def get_skills():
    """Serve the skills fixture as JSON with HTTP 200."""
    with open(DATABASE + "/skills.json") as fixture:
        payload = json.load(fixture)
    return jsonify(payload), 200
def perks():
    """Return the perks list wrapped in a success envelope."""
    envelope = {'status': 'success'}
    with open("server/data/perks.json", "r") as perks_file:
        envelope['perks'] = json.load(perks_file)['perks']
    return jsonify(envelope)
def __init__(self, path):
    """Load a theme rooted at *path*, reading its metadata from info.json."""
    # Absolute root of the theme; every theme file lives below it.
    self.path = os.path.abspath(path)

    with open(os.path.join(self.path, 'info.json')) as info_file:
        self.info = meta = json.load(info_file)

    # Human-readable theme name (required).
    self.name = meta['name']
    # Identifier of the owning application (required); callers will probably
    # want to validate it.
    self.application = meta['application']
    # Python-identifier-style theme id; usually matches the directory name.
    self.identifier = meta['identifier']
    # Default (English) description, if any.
    self.description = meta.get('description')
    # Localized descriptions keyed by lowercased language code, taken from
    # 'description_XX' entries; 'en' falls back to the base description.
    self.localized_desc = {
        key.split('_', 1)[1].lower(): value
        for key, value in meta.items()
        if key.startswith('description_')
    }
    self.localized_desc.setdefault('en', self.description)
    # Author string as given; may or may not embed an email address.
    self.author = meta['author']
    # Short license phrase ("GPL", "BSD", ...) and optional link to its text.
    self.license = meta.get('license')
    self.license_url = meta.get('license_url')
    # Theme's or author's website.
    self.website = meta.get('website')
    # Preview image path within the static folder.
    self.preview = meta.get('preview')
    # Doctype: 'html4', 'html5', or 'xhtml'; defaults to 'html5'.
    self.doctype = meta.get('doctype', 'html5')
    # Free-form version string.
    self.version = meta.get('version')
    # Application-specific extras.
    self.options = meta.get('options', {})