def latest_publish(package, operation):
    """Prepend a publish event for *package* to the repository's latest.json.

    The list is kept newest-first and capped at
    current_app.config["LIST_MAX_COUNT"] entries.

    :param package: object exposing family/name/version/updated_at
        (and optionally publisher) attributes
    :param operation: action string recorded for the event
    """
    fpath = os.path.join(
        current_app.config["WWW_ROOT"], "repository", "latest.json"
    )
    latest_publish_obj = _read_json(fpath, [])
    # getattr with a default replaces the hasattr/else dance.
    publisher = getattr(package, "publisher", "anonymous")
    latest_publish_obj.insert(0, {
        "action": operation,
        "publisher": publisher,
        "family": package.family,
        "name": package.name,
        "version": package.version,
        "update_at": package.updated_at
    })
    max_count = current_app.config["LIST_MAX_COUNT"]
    if len(latest_publish_obj) > max_count:
        latest_publish_obj = latest_publish_obj[:max_count]
    # BUG FIX: the original passed open() straight to json.dump, leaking
    # the file handle; the context manager guarantees it is closed.
    with open(fpath, 'w') as f:
        json.dump(latest_publish_obj, f)
def bulk_classify(files, loaded_model, token):
    """
    Takes a list of files and a model for the CNN, and classifies each file.

    :param files: List of files to classify
    :type files: list
    :param loaded_model: Model loaded from disk
    :return list:
    """
    results = []
    progress_path = RESULTS_FOLDER + token + SH + "progress.txt"
    with open(progress_path) as fh:
        progress = json.load(fh)
    for entry in files:
        # Entries with an empty label pass through unclassified.
        if entry[1] == "":
            results.append((entry[0], ""))
        else:
            results.append((CNN(entry[0], loaded_model), entry[1]))
        # Persist per-file progress so a polling UI can track it live.
        progress[token]['classify'] = progress[token]['classify'] + 1
        with open(progress_path, 'w') as fh:
            json.dump(progress, fh)
    return results
def bmi():
    """Compute and store a user's BMI.

    On POST with a userName field: a new name is persisted with the
    submitted weight/height and the BMI page is rendered; an existing
    name has its stored record dumped to data.json followed by a
    redirect to the dropdown view. GET renders the empty form.
    """
    myName = ""
    myWeight = 0
    myHeight = 1  # non-zero default guards the division below
    db.create_all()
    if request.method == "POST" and 'userName' in request.form:
        myName = request.form.get("userName")
        myWeight = float(request.form.get("userWeight"))
        myHeight = float(request.form.get("userHeight"))
        if db.session.query(Bmi).filter(Bmi.name_ == myName).count() == 0:
            # New user: persist and render BMI = weight / height^2, 1 d.p.
            bmi = Bmi(myName, myWeight, myHeight)
            db.session.add(bmi)
            db.session.commit()
            return render_template('bmi.html', name=myName,
                                   bmi=round(myWeight / (myHeight * myHeight), 1))
        else:
            # Existing user: read back the stored record.
            row = db.session.query(Bmi).filter(Bmi.name_ == myName)
            nm = row.first().name_
            wt = row.first().weight_
            ht = row.first().height_
            bmiHere = round(wt / (ht * ht), 1)
            jOut = {"name": nm, "weight": str(wt), "height": str(ht)}
            # string conversion is required for above decimal values coz decimals are not JSON serializable
            with open('data.json', 'w') as outfile:
                json.dump(jOut, outfile)
            return redirect(url_for('getDropDown'))
            # return json.dumps(jOut)
            # return "This name: %s already exists. The BMI is %s" % (nm, bmiHere)
            # return str(getPostName(myName))
    else:
        return render_template('bmi.html')
def process_compressed(compressed_list, token):
    """
    Takes a list of file paths to compressed folders, extracts them, and
    returns the file paths of the extracted folder in a list.

    :param compressed_list: List of file paths
    :type compressed_list: list
    :return list:
    """
    progress_path = RESULTS_FOLDER + token + SH + "progress.txt"
    with open(progress_path) as fh:
        progress = json.load(fh)
    extracted_dirs = []
    for archive in compressed_list:
        target = archive + ".dir" + SH
        # Tarballs (plain or gzipped) are extracted via tarfile.
        if archive.endswith((".tar", ".tar.gz")):
            tf = tarfile.open(archive)
            os.mkdir(target)
            extracted_dirs.append(target)
            tf.extractall(target)
            tf.close()
        # Zip archives go through zipfile.
        if archive.endswith(".zip"):
            with zipfile.ZipFile(archive, 'r') as zf:
                os.mkdir(target)
                extracted_dirs.append(target)
                zf.extractall(target)
        # Persist per-archive progress for the polling UI.
        progress[token]['extract'] = progress[token]['extract'] + 1
        with open(progress_path, 'w') as fh:
            json.dump(progress, fh)
    return extracted_dirs
def saveLog():
    """Append the request's JSON payload as one JSON line to the log file."""
    content = request.json
    # BUG FIX: the original never closed the handle; the context manager
    # guarantees the line is flushed and the file is closed.
    with open(fileName + '.txt', 'a') as outfile:
        json.dump(content, outfile)
        outfile.write('\n')
def create_user(username=None, password=None):
    """ Create user """
    user_file = safe_join(app.config['USERS_FOLDER'], username)
    hashed = sha256_crypt.encrypt(password)
    # 'x' mode fails if the user file already exists (no overwrite).
    with open(user_file, 'x') as handle:
        dump({'password': hashed}, handle)
    # Record the account creation timestamp in the user's settings.
    settings_write(username, 'create', int(time.time()))
def settings_write(name=None, key=None, value=None):
    """ Write to settings file """
    settings_path = safe_join(app.config['SETTINGS_FOLDER'], name + '.json')
    # Merge the new key into the existing settings before rewriting.
    content = settings_read(name)
    content[key] = value
    with open(settings_path, 'w') as handle:
        dump(content, handle)
def set_config():
    """Persist the posted JSON config and regenerate/deploy the codegen output."""
    # BUG FIX: `print request.json` is a Python 2 print statement and a
    # SyntaxError on Python 3; the call form works on both.
    print(request.json)
    with open("/home/fchai/server-relay/codegen/config.json", "w") as f:
        json.dump(request.json, f)
    # NOTE(review): shell pipeline with a hard-coded IP; consider
    # subprocess.run([...]) and externalising the address.
    os.system(
        "cd codegen; python3 ./codegen.py; rap deploy -s 10.221.66.14; cd ..")
    return jsonify({"result": True})
def year_post(year):
    """On POST, append a new Nobel prize entry to static/nobel.json and
    redirect to the year's page; on GET, render the entry form.
    """
    if request.method == 'POST':
        year = request.form["year"]
        category = request.form["category"]
        my_id = request.form["id"]
        firstname = request.form["firstname"]
        surname = request.form["surname"]
        motivation = request.form["motivation"]
        nobel_year = {
            'year': year,
            'category': category,
            'laureates': [{
                # BUG FIX: the original stored the builtin function `id`
                # here instead of the submitted form value `my_id`.
                'id': my_id,
                'firstname': firstname,
                'surname': surname,
                'motivation': motivation
            }]
        }
        # r+ lets us read, rewind, and overwrite in one handle; the file
        # only grows here, so no truncation is needed after seek(0).
        with open('./static/nobel.json', 'r+') as file:
            file_data = json.load(file)
            file_data['prizes'].append(nobel_year)
            file.seek(0)
            json.dump(file_data, file, indent=4)
        return redirect(url_for("nobel_year", year=year))
    else:
        return render_template("form.html")
def test_get_secrets(self): """Vanilla test case for get_secrets Utility function for reading secrets from configured location """ # Setting up config object tc = UtilsTestConfig() with tempfile.NamedTemporaryFile(mode="w", suffix=".json") as temp_secrets_file: json.dump(self.test_secrets, temp_secrets_file) temp_secrets_file.flush() tc.SECRETS_FILE = temp_secrets_file.name current_app.config.from_object(tc) util.flush_caches() secrets = util.get_secrets() secrets2 = util.get_secrets() secrets3 = util.get_secrets(flush_cache=True) self.assertDictEqual( secrets, self.test_secrets, "get_secrets not reading secrets from configured location") # Checking that cache (and its flush) is working as expected self.assertIs(secrets, secrets2, "get_secrets cache is not working as expected") self.assertIsNot(secrets, secrets3, "get_secrets cache flush is not happening")
def Register_page1():
    """Handle registration: persist the submitted credentials to data.json
    and show the login page; GET renders the registration form.
    """
    if request.method == "POST":
        attempted_username = request.form['Username']
        attempted_password = request.form['Password']
        attempted_email = request.form['Email']
        # BUG FIX: validate BEFORE writing — the original appended the
        # (possibly empty) credentials to data.json and only afterwards
        # rejected them, leaving junk records behind.
        if attempted_username == '' or attempted_password == '' or attempted_email == '':
            return 'Invalid Credentials. Please try again.'
        with open('data.json', 'r') as infile:
            data = json.load(infile)
        data.append({
            "email": attempted_email,
            "pass": attempted_password,
            "user": attempted_username,
        })
        with open('data.json', 'w') as outfile:
            json.dump(data, outfile, indent=4)
        # NOTE(review): passwords are stored in plain text — hash them.
        return render_template('login.html')
    else:
        return render_template('registration.html')
def ifNoUserJsonFillWithDefault(path, numberOfUsers):
    """If there isn't a user json file here we need to make it and fill
    it with default values so that it can be used.
    """
    # Nothing to do when the file already exists.
    if os.path.isfile(path):
        return
    # Create the file with an empty users list.
    with open(path, 'w') as f:
        f.write("{\"users\": []}")
    # Load it back through the shared loader.
    dataJson, filePath = loadJsonDataObj(path)
    # Populate one default record per user; ids start at 1.
    for i in range(numberOfUsers):
        dataJson['users'].append({
            "id": i + 1,
            "elo": 0,
            "correctMatches": 0,
            "incorrectMatches": 0
        })
    # Save the file with the new data in it.
    with open(filePath, 'w') as outfile:
        json.dump(dataJson, outfile)
def save(self, form):
    """Accepts a web input form from a request. Attempts to store the
    content of the form within the file specified by 'settingsfile'.

    Returns:
        dict('status': 'OK') if app was able to write out the settings file.
        dict('status': 'WRITE_FAIL'/'LOAD_FAIL') otherwise.
    """
    app.logger.debug(form)
    try:
        settings_dir = os.path.dirname(form["settingsfile"])
        if not os.path.exists(settings_dir):
            os.makedirs(settings_dir)
        # overwrite any previous settings.
        # BUG FIX: json.dump(form, open(...)) leaked the file handle;
        # the context manager closes it even if dump raises.
        with open(form["settingsfile"], "w") as fh:
            json.dump(form, fh)
    except Exception as e:
        app.logger.debug(e)
        return {"data": {}, "status": "WRITE_FAIL"}
    try:
        # try to apply the settings back onto self.
        self.load(form["settingsfile"])
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed.
        return {"data": {}, "status": "LOAD_FAIL"}
    # all is good, return OK
    return {"data": form, "status": "OK"}
def sync_interaction_model(self):
    """
    Generates a JSON representation of the Skill Interaction Model JSON,
    this is not 100% complete model but is a starting point, it can be
    copy/pasted in ASK Console JSON Editor as a starting point
    """
    if not self.impath:
        return
    try:
        # BUG FIX: the original open()/close() pair leaked the handle when
        # json.dump raised; the context manager always closes it.
        with open(self.impath, 'w', encoding='utf-8') as out:
            # TODO: support types
            json.dump(
                {
                    "interactionModel": {
                        "languageModel": {
                            "invocationName": self.invocation_name,
                            "intents": list(self._gen_im_intents()),
                            "types": []
                        }
                    }
                },
                out,
                indent=4)
        logger.debug("Synced interaction model to : %s" % self.impath)
    except Exception:
        # Narrowed from a bare `except:`; the failure is logged with the
        # traceback exactly as before.
        logger.warn("Failed synching interaction model to : %s" % self.impath,
                    exc_info=True)
def test_get_user_secrets(self): """Vanilla test case for get_user_secrets Utility function for reading user secrets from configured location """ # Setting up config object tc = UtilsTestConfig() with tempfile.NamedTemporaryFile(mode="w", suffix=".json") as temp_secrets_file, \ tempfile.NamedTemporaryFile(mode="w", suffix=".json") as temp_user_secrets_file: json.dump(self.test_secrets, temp_secrets_file) json.dump(self.test_user_secrets, temp_user_secrets_file) temp_secrets_file.flush() temp_user_secrets_file.flush() tc.SECRETS_FILE = temp_secrets_file.name tc.USER_SECRETS_FILE = temp_user_secrets_file.name # *not* reusing the temp file tc.USER_SECRETS_SALT_KEY = self.test_secret_key current_app.config.from_object(tc) util.flush_caches() util.get_secrets(flush_cache=True) util.get_user_secrets(flush_cache=True) util.secure_mode(flush_cache=True) user_secrets = util.get_user_secrets() self.assertDictEqual(user_secrets, self.test_user_secrets, "User secrets not loaded correctly")
def Recommend():
    """Read five movie/rating pairs from the form, write recommendation
    seed data to static/js/data.json, and render the index page.
    """
    _movie1 = str(request.form['movie1'])
    _movie2 = str(request.form['movie2'])
    _movie3 = str(request.form['movie3'])
    _movie4 = str(request.form['movie4'])
    _movie5 = str(request.form['movie5'])
    _rate1 = int(request.form['rate1'])
    _rate2 = int(request.form['rate2'])
    _rate3 = int(request.form['rate3'])
    # BUG FIX: the original assigned rate4 and rate5 to _rate3, clobbering
    # it and never capturing the last two ratings.
    _rate4 = int(request.form['rate4'])
    _rate5 = int(request.form['rate5'])
    data = [{"title1": _movie1,
             "imgUrl1": "http://webneel.com/daily/sites/default/files/images/daily/02-2013/11-hard-candy-creative-movie-poster-design.jpg",
             "url1": "http://www.amazon.com",
             "title2": _movie2,
             "imgUrl2": "http://webneel.com/sites/default/files/images/blog/thumb-movipos.jpg",
             "url2": "http://www.amazon.com",
             "title3": _movie3,
             "imgUrl3": "http://webneel.com/daily/sites/default/files/images/daily/02-2013/6-big-fish-creative-movie-poster-design.jpg",
             "url3": "http://www.amazon.com"}]
    # Writing JSON data
    with open('static/js/data.json', 'w') as f:
        json.dump(data, f)
    return render_template('index.html')
def most_depended_upon(package, operation):
    """Record *package* as a reverse dependency of each of its dependencies
    in the repository's depend.json.
    """
    dependencies = package.get('dependencies', None)
    if not dependencies:
        return
    fpath = os.path.join(
        current_app.config["WWW_ROOT"], "repository", "depend.json"
    )
    depended_obj = _read_json(fpath, {})
    # Canonical "family/name@version" identifier for this package.
    if isinstance(package, Package):
        p = str(package)
    else:
        p = '%s/%s@%s' % (package['family'], package['name'], package['version'])
    for dep in dependencies:
        # Only well-formed "family/name@version" specs are indexed.
        if '@' not in dep or '/' not in dep:
            continue
        # BUG FIX: dict.has_key() was removed in Python 3; use `in`.
        if dep in depended_obj:
            if p not in depended_obj[dep]:
                depended_obj[dep].append(p)
        else:
            depended_obj[dep] = [p]
    # BUG FIX: json.dump(..., open(...)) leaked the file handle.
    with open(fpath, 'w') as f:
        json.dump(depended_obj, f)
def saveDB(file_path, data_length):
    """Persist the first *data_length* scraped records to *file_path* as
    pretty-printed JSON, keyed by each record's name.

    Relies on the module-level `namelist` and `ctxlist` parallel lists —
    assumes both have at least *data_length* entries (TODO confirm).
    """
    # Accumulator for the records to be saved.
    dataAnalysis = dict()
    # Build one record per entry.
    for i in range(0, data_length):
        # Value object for this entry.
        Name = dict()
        Name["name"] = namelist[i]
        Name["context"] = ctxlist[i]
        # Name["address"] = address[i]
        # Name["bigRegion"] = bigRegion[i]
        # Name["smallRegion"] = smallRegion[i]
        # Name["phone"] = phone[i]
        # Name["industry"] = industry[i]
        # Name["classify"] = classify[i]
        # Name["confidence"] = confidence[i]
        # Key the record by the entry's name.
        dataAnalysis[str(namelist[i])] = Name
    # Write the collected records as tab-indented JSON.
    with open(file_path, 'w', encoding='utf-8') as make_file:
        json.dump(dataAnalysis, make_file, indent="\t")
def cat0_js(id):
    """Show one cat joke; on POST increment its like (submit_a) or
    dislike (submit_b) counter and persist the collection.

    BUG FIX: a POST with neither submit button previously fell through
    and returned None (a 500 in Flask); it now renders the joke
    unchanged. The duplicated like/dislike branches are also merged.
    """
    joke = data['JokeCollection'][0]['catJokes'][id]
    if request.method == 'POST':
        # Map the pressed button to the counter it bumps.
        if request.form.get('submit_a'):
            field = 'like'
        elif request.form.get('submit_b'):
            field = 'dislike'
        else:
            field = None
        if field is not None:
            joke[field] = joke[field] + 1
            # Persist the updated counters immediately.
            with open('jokeCollection.json', 'w') as fp:
                json.dump(data, fp)
    return render_template('onejoke.html', like=joke['like'],
                           dislike=joke['dislike'], onejoke=joke['content'])
def Comment():
    """Append a comment to the post identified by ?comment=<id>; GET
    renders the comment form. Requires a logged-in session.
    """
    postID = request.args.get('comment', '')
    if not session.get('logged_in'):
        return login()
    if request.method == 'POST':
        SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
        json_url = os.path.join(SITE_ROOT, "static", "everything.json")
        # BUG FIX: the original open() handle was never closed.
        with open(json_url, "r") as ro:
            data = json.loads(ro.read())
        description = request.form['uplDescription']
        # Preserve line breaks when the comment is rendered as HTML.
        description1 = description.replace('\n', '<br>')
        user = session.get('CURRENT_USER')
        comment = {'author': user['username'], 'description': description1}
        for post in data["posts"]:
            if int(post["id"]) == int(postID):
                post["comments"].append(comment)
        with open(json_url, 'w') as f:
            json.dump(data, f)
        return redirect('/all/')
    else:
        url = url_for('static', filename='csstest.css')
        image = url_for('static', filename='logo1.png')
        type = 'comment'
        return render_template('uplTemplate.html', type=type, csssheet=url,
                               image=image, user=session.get('CURRENT_USER'))
def transmissionMatToOmt(temp_dir):
    '''
    Convert a MATPOWER .mat or .m input into a JSON .omt transmission circuit
    format and return the .omt.

    Form parameters:
    :param matpower: a MATPOWER .mat file.

    Details:
    :OMF function: omf.network.parse()
    :run-time: maybe a couple minutes.
    '''
    mat_path = os.path.join(temp_dir, "input.mat")
    request.files["matpower"].save(mat_path)
    omt_json = network.parse(mat_path, filePath=True)
    # network.parse() signals failure by returning this empty skeleton
    # rather than raising, so detect it by comparison.
    if omt_json == {
        "baseMVA": "100.0",
        "mpcVersion": "2.0",
        "bus": {},
        "gen": {},
        "branch": {}
    }:
        raise Exception(
            "The submitted .m file was invalid or could not be parsed correctly."
        )
    # Lay the network out geographically before writing the .omt.
    nxG = network.netToNxGraph(omt_json)
    omt_json = network.latlonToNet(nxG, omt_json)
    with open(os.path.join(temp_dir, filenames["tmomt"]), 'w') as f:
        json.dump(omt_json, f)
def Del():
    """Delete the post identified by ?delete=<id> when it belongs to the
    logged-in user; otherwise render an error page.
    """
    postID = request.args.get('delete', '')
    if not session.get('logged_in'):
        return login()
    SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
    json_url = os.path.join(SITE_ROOT, "static", "everything.json")
    url = url_for('static', filename='csstest.css')
    image = url_for('static', filename='logo1.png')
    # BUG FIX: the original open() handle was never closed.
    with open(json_url, "r") as ro:
        data = json.loads(ro.read())
    user = session.get('CURRENT_USER')
    target = None
    for post in data["posts"]:
        if int(post["id"]) == int(postID):
            if user["username"] == post["author"]:
                target = post
            break
    if target is not None:
        # BUG FIX: the original ran `del data["posts"][int(postID)]`,
        # removing by LIST INDEX while matching by post id — the wrong
        # post was deleted whenever ids and positions diverge — and it
        # mutated the list while iterating it.
        data["posts"].remove(target)
        with open(json_url, 'w') as f:
            json.dump(data, f)
        return redirect('/all/')
    title = "Wrong user logged in"
    result = "You cannot delete this post."
    return render_template('template2.html', title=title, result=result,
                           csssheet=url, image=image,
                           user=session.get('CURRENT_USER'))
def Unfollow():
    """Remove ?unfollow=<user> from the current user's following list and
    decrement that user's follower count.
    """
    Suser = request.args.get('unfollow', '')
    if not session.get('logged_in'):
        return redirect('/login/')
    SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
    json_url = os.path.join(SITE_ROOT, "static", "everything.json")
    # BUG FIX: the original open() handle was never closed.
    with open(json_url, "r") as ro:
        data = json.loads(ro.read())
    for dUser in data["users"]:
        if dUser["username"] == session.get('CURRENT_USER')['username']:
            # BUG FIX: iterate over a copy — the original removed from
            # dUser["following"] while iterating it, which skips elements.
            for follows in list(dUser["following"]):
                if follows == Suser:
                    dUser["following"].remove(Suser)
                    # Keep the session's cached user record in sync.
                    session['CURRENT_USER'] = dUser
                    # Decrement the unfollowed user's follower count.
                    for fUser in data["users"]:
                        if fUser["username"] == follows:
                            fUser['followers'] -= 1
    with open(json_url, 'w') as f:
        json.dump(data, f)
    return redirect('/user/?user=' + Suser)
def update():
    """Update (sub=update) or append (sub=add) an entry in
    tmp/generated/<fileId>.json and return the resulting list.
    """
    injson = request.get_json()
    index = request.args.get('index')
    fileId = request.args.get('infile')
    sub = request.args.get('sub')
    newFile = request.args.get('newFile')
    path = os.path.join(root_dir(), f"tmp/generated/{fileId}.json")
    with open(path, 'r') as json_file:
        tempJson = json.load(json_file)
    # Existing files may end with a placeholder record (no NAME); drop it.
    if newFile == "false":
        if "NAME" not in tempJson[-1]:
            tempJson.pop(-1)
    if sub == "update":
        tempJson[int(index)] = injson
        with open(path, 'w') as json_file:
            json.dump(tempJson, json_file)
    if sub == "add":
        # BUG FIX: `== None` replaced with the idiomatic `is None`; the
        # stray debug print of newFile is removed.
        if newFile == "true" and tempJson[0]['NAME'] is None:
            # A brand-new file holds one empty placeholder — replace it.
            tempJson[0] = injson
        else:
            tempJson.append(injson)
        with open(path, 'w') as json_file:
            json.dump(tempJson, json_file)
    return (jsonify(tempJson))
def register(file, title, author, year, image):
    # Read the file provided, create a json file, update the books.json
    # meta file, and echo the newly assigned id.
    with app.open_resource(BOOK_META) as f:
        books = json.load(f)
    # Kept for its side effects on the uploaded file (result unused here).
    book = get_book_details(file)
    # Next id is one past the last registered book; 1 for an empty list.
    if books["books"]:
        new_id = books["books"][-1]["id"] + 1
    else:
        new_id = 1
    new_book = {
        "id": new_id,
        "title": title,
        "file": file,
        "author": author,
        "image": image,
        "year": year,
    }
    books["books"].append(new_book)
    with open(BOOK_META, 'w') as f:
        json.dump(books, f)
    click.echo("%s Registered\nID: %d" % (title, new_id))
def download_data():
    """Export all users, locations, and checkins as a downloadable JSON file."""
    data = {
        'users': [u.to_dict(incl_checkins=True) for u in User.query.all()],
        'locations': [l.to_dict(incl_checkins=True) for l in Location.query.all()],
        'checkins': [
            c.to_dict(incl_user=True, incl_location=True)
            for c in Checkin.query.all()
        ]
    }
    export_dir = current_app.config['EXPORT_FOLDER']
    path = os.path.join(export_dir, 'export.json')
    # Ensure the export folder exists and remove any stale export file.
    if not os.path.exists(export_dir):
        os.makedirs(export_dir)
    elif os.path.exists(path):
        os.remove(path)
    with open(path, 'w+') as handle:
        json.dump(data, handle, indent=4)
    return send_file(path, mimetype='application/json',
                     attachment_filename='export.json', as_attachment=True,
                     cache_timeout=-1)
def your_url():
    """Register a short code for a URL or an uploaded file, then show it."""
    if request.method != 'POST':
        return redirect(url_for('home'))
    # Load existing mappings if the store already exists.
    urls = {}
    if os.path.exists('urls.json'):
        with open('urls.json') as urls_file:
            urls = json.load(urls_file)
    # Reject duplicate short codes.
    if request.form['code'] in urls.keys():
        flash(
            'That shortname has already been taken. Please select another name'
        )
        return redirect(url_for('home'))
    if 'url' in request.form.keys():
        urls[request.form['code']] = {'url': request.form['url']}
    else:
        # File upload: save under <code><sanitized original name>.
        upload = request.files['file']
        full_name = request.form['code'] + secure_filename(upload.filename)
        upload.save(
            'C:/Users/LeandroBrito/Documents/GitHub/url-shortner/static/user_files/'
            + full_name)
        urls[request.form['code']] = {'file': full_name}
    with open('urls.json', 'w') as url_file:
        json.dump(urls, url_file)
    # Mark this code as owned by the current session.
    session[request.form['code']] = True
    return render_template('your-url.html', code=request.form['code'])
def get_cities():
    """Dump every row of the `ciudad` table to data/ciudades.json.

    NOTE(review): uses the Python 2-only `unicode` builtin — this
    function raises NameError on Python 3.
    """
    cur = get_cur(datasource)
    sql = ("SELECT * FROM ciudad")
    # NOTE(review): `rows` is never used; results come from fetchall below.
    rows = cur.execute(sql)
    ciudades = {}
    i = 0
    for row in cur.fetchall():
        # Column order assumed: id, name, code, latitude, longitude —
        # TODO confirm against the table schema.
        ciudad = {
            'idCiudad': row[0],
            # errors='replace' repairs badly encoded bytes in the name.
            'ciudad': unicode(row[1], errors='replace'),
            'codigo_ciudad': row[2],
            'latitud': row[3],
            'longitud': row[4],
        }
        # Keyed by running index, not by city id.
        ciudades[i] = ciudad
        i += 1
    with open('data/ciudades.json', 'w') as file:
        json.dump(ciudades, file)
def register_camera():
    """Register a new camera from the CMS request, update conf.json and
    nginx, and return the camera's RTMP ingest endpoint as JSON.
    """
    input_json = request.get_json()
    logging.info('Request from CMS: {}'.format(input_json))
    camera_name = input_json['name']
    camera_type = input_json['type']
    with open("conf.json") as conf_json:
        data = json.load(conf_json)
    logging.info('Available cameras: {}'.format(data))
    # New cameras start with no streaming engines attached.
    data["cameras"].append({
        "name": camera_name,
        "type": camera_type,
        "streamingEngines": []
    })
    logging.info('Cameras updated: {}'.format(data))
    with open("conf.json", "w") as conf_json:
        json.dump(data, conf_json)
    update_nginx(data)
    # NOTE(review): os.getenv(get_ma_ip()) treats get_ma_ip()'s return
    # value as an environment variable NAME; this looks like it should be
    # get_ma_ip() directly (or os.getenv of a fixed name) — confirm intent.
    ma_ip = os.getenv(get_ma_ip())
    response = {}
    response[
        "endpoint"] = "rtmp://{ma_ip}:1935/{camera_name}/{camera_name}".format(
            ma_ip=ma_ip, camera_name=camera_name)
    return json.dumps(response, sort_keys=False)
def get_stream():
    """Attach a streaming engine to a camera in conf.json, refresh nginx,
    and return the HLS playback URL as JSON.
    """
    input_json = request.get_json()
    logging.info('Request from CMS: {}'.format(input_json))
    stream_app = input_json["name"]
    streaming_engine_IP = input_json["se_ip"]
    # Strip an optional ":port" suffix; only the host part is stored.
    # (Stray debug print removed — logging is used everywhere else here.)
    streaming_engine_IP = streaming_engine_IP.split(':')[0]
    with open("conf.json") as conf_json:
        data = json.load(conf_json)
    logging.info('Available cameras: {}'.format(data))
    for camera in data['cameras']:
        if camera['name'] == stream_app:
            camera['streamingEngines'].append(streaming_engine_IP)
    logging.info('Cameras updated: {}'.format(data))
    with open("conf.json", "w") as conf_json:
        json.dump(data, conf_json)
    update_nginx(data)
    response = {}
    response[
        "url"] = 'http://{streaming_engine_IP}:80/hls/{stream_app}.m3u8'.format(
            streaming_engine_IP=streaming_engine_IP, stream_app=stream_app)
    return json.dumps(response, sort_keys=False)
def create_store():
    """Create a store from the posted JSON body, persist the list, echo it back."""
    payload = request.get_json()  # request made to this endpoint.
    store = {'name': payload['name'], 'items': []}
    stores.append(store)
    # Persist the in-memory store list after every creation.
    with open('test.json', 'w') as fh:
        json.dump(stores, fh)
    return jsonify(store)
def create():
    """Wrap the posted payload in a stock query, persist it, and return it."""
    payload = json.loads(request.data)
    query = {"stock": payload}
    # Hard-coded developer path; the query file feeds the backend.
    with open('/Users/antonioonwu/stonkstop/src/Backend/Query.json', 'w') as fh:
        json.dump(query, fh, indent=4)
    return query
def setup_device():
    """
    Sets up garage :
    {
        "service_key_blob": "...",
        "in_sub": "...",
        "out_sub": "...",
        "ssid": "...",
        "psk": "..."
    }
    """
    payload = request.get_json()
    service_key_blob = payload['service_key_blob']
    ssid = payload['ssid']
    psk = payload['psk']
    in_sub = payload['in_sub']
    out_sub = payload['out_sub']
    # TODO: Check validity of params
    device_config = {
        "in_subscription": in_sub,
        "out_subscription": out_sub,
        'ssid': ssid,
        'psk': psk
    }
    # Write the device settings to a temp-suffixed file next to SETTINGS_FILE.
    settings_file_name = "%s.tmp" % os.environ.get('SETTINGS_FILE')
    with open(settings_file_name, 'w', encoding='utf-8') as f:
        json.dump(device_config, f, ensure_ascii=False, indent=4)
    # Decode the base64 service-account blob into the credentials file.
    service_file_name = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
    with open(service_file_name, 'w', encoding='utf-8') as f:
        f.write(base64.b64decode(service_key_blob).decode('utf-8'))
    return "OK", 200
def gridlabdToGfm(temp_dir):
    '''Data Params: {glm: [file], other_inputs: see source}
    OMF function: omf.models.resilientDist.convertToGFM()
    Runtime: should only be a couple seconds.
    Result: Convert the GridLAB-D model to a GFM model. Return the new id for
    the converted model. Note that this is not the main fragility model for
    GRIP.'''
    # Save the uploaded GLM to the working directory.
    fName = 'in.glm'
    f = request.files['glm']
    glmPath = os.path.join(temp_dir, fName)
    f.save(glmPath)
    # Collect the numeric and list-valued GFM inputs from the form.
    gfmInputTemplate = {
        'phase_variation': float(request.form.get('phase_variation')),
        'chance_constraint': float(request.form.get('chance_constraint')),
        'critical_load_met': float(request.form.get('critical_load_met')),
        'total_load_met': float(request.form.get('total_load_met')),
        'maxDGPerGenerator': float(request.form.get('max_dg_per_generator')),
        'dgUnitCost': float(request.form.get('dg_unit_cost')),
        'generatorCandidates': request.form.get('generator_candidates'),
        'criticalLoads': request.form.get('critical_loads')
    }
    # Every input is required; fail loudly with the offending key.
    # NOTE(review): the float() conversions above raise TypeError on a
    # missing form field before this check can run — confirm whether the
    # string-valued fields are the only ones this loop is meant to guard.
    for key, val in gfmInputTemplate.items():
        if val is None:
            raise Exception(
                ("gridlabdToGfm was expecting a valid value for key: {key},"
                 "but it received value: {value}").format(key=key, value=val))
    feederModel = {
        'nodes': [],  # Don't need these.
        'tree': omf.feeder.parse(glmPath)
    }
    gfmDict = omf.models.resilientDist.convertToGFM(gfmInputTemplate,
                                                    feederModel)
    with open(os.path.join(temp_dir, filenames["glgfm"]), 'w') as f:
        json.dump(gfmDict, f)
def generate_swagger(app_name, instance, fp):
    """Initialize an instance of the application API, then use its context
    to generate a json schema and dump to disk as 'swagger.json'
    """
    import importlib

    app_loc = app_name + '_app'
    inst_loc = instance + '_instance'
    # BUG FIX: the original used exec("from ... import ...") — on
    # Python 3 names bound by exec inside a function are not visible to
    # the function body, so the calls below raised NameError. Import the
    # module explicitly instead.
    module = importlib.import_module('{}.application'.format(app_loc))
    create_application = module.create_application
    get_api = module.get_api
    print(
        'Generating swagger.json file for {} instance of {} application'.format(inst_loc, app_name)
    )
    app = create_application(os.path.abspath(inst_loc))
    api = get_api()
    # Required to make sure the app is able to create a URL adapter for
    # request independent URL generation. Otherwise a RuntimeError is
    # raised
    app.config['SERVER_NAME'] = 'localhost'
    with app.app_context():
        # BUG FIX: json.dump writes str, so the file must be opened in
        # text mode — 'wb' raises TypeError on Python 3.
        with open(fp, 'w') as fh:
            json.dump(api.__schema__, fh)
def put(layout_id, turnout_id):
    """Update the matching turnout in <layout_id>.turnouts.json from the
    request's JSON body and return the updated turnout.

    404 if the turnout id is unknown; 400 on a missing/invalid body.
    """
    path = os.path.dirname(__file__) + '/' + layout_id + '.turnouts.json'
    with open(path) as turnout_file:
        data = json.load(turnout_file)
    # Find the turnout addressed by the URL.
    turnouts = [
        turnout for turnout in data if turnout['turnoutId'] == turnout_id
    ]
    if len(turnouts) == 0:
        abort(404)
    turnout = turnouts[0]
    if not request.json:
        abort(400)
    # Exact `type(...) is not int` checks: unlike isinstance, these also
    # reject bools, which would otherwise pass as ints.
    if 'current' in request.json and type(request.json['current']) is not int:
        abort(400)
    if 'straight' in request.json and type(
            request.json['straight']) is not int:
        abort(400)
    if 'divergent' in request.json and type(
            request.json['divergent']) is not int:
        abort(400)
    # save all keys
    for key in request.json:
        turnout[key] = request.json.get(key, turnout[key])
    # Persist the whole layout file with the updated turnout in place.
    with open(path, 'w') as turnout_file:
        json.dump(data, turnout_file)
    return jsonify(turnout)
def save_to_json(_object):
    """Append *_object* to the JSON backend alongside the existing objects."""
    objects = load_objects_from_json()
    objects.append(_object)
    # Convert the full object list to plain dicts before serializing.
    serializable = make_dicts_from_object_list(objects)
    with open(json_backend, "w") as fh:
        json.dump(serializable, fh)
def frequency_calculation():
    """Count how often each (doctor, observations, complaints, diagnoses,
    drug) combination appears in the downloaded data and write the
    deduplicated records, annotated with their frequency, to
    /tmp/drug_freq.json.
    """
    from collections import Counter

    def _combo_key(rec):
        # The five fields the original compared pairwise with ==.
        return (rec.get('practice_doctor_profile_id'),
                rec.get('observations'),
                rec.get('complaints'),
                rec.get('diagnoses'),
                rec.get('DrugName'))

    output = []
    uniq_set = set()
    file_name = download()
    with open(file_name, 'r') as infile:
        data = json.load(infile)
    # PERF FIX: one-pass Counter replaces the original O(n^2) nested loop
    # that compared every record against every other record.
    counts = Counter(_combo_key(rec) for rec in data)
    for each in data:
        frequency = counts[_combo_key(each)]
        # Dedup key built from the stripped text fields (as in the original,
        # this intentionally differs slightly from the grouping key).
        temp = ''
        if each.get('observations'):
            temp += each['observations'].strip()
        if each.get('diagnoses'):
            temp += each['diagnoses'].strip()
        if each.get('complaints'):
            temp += each['complaints'].strip()
        if each.get('practice_doctor_profile_id'):
            temp += each['practice_doctor_profile_id'].strip()
        if each.get('DrugName'):
            temp += each['DrugName']
        if temp in uniq_set:
            continue
        uniq_set.add(temp)
        each['frequency'] = frequency
        output.append(each)
    # BUG FIX: json.dump(..., open(...)) leaked the file handle.
    with open('/tmp/drug_freq.json', 'w') as outfile:
        json.dump(output, outfile, indent=4, sort_keys=False)
def zip_items(result_id, items_ids):
    """Build a ZIP of the given archive items plus a verification.json
    manifest, upload it to media storage, mark the zip result record
    done, and push a readiness notification.
    """
    result_id = ObjectId(result_id)
    archive_service = get_resource_service('archive')
    vppzip_service = get_resource_service('verifiedpixel_zip')
    results_service = get_resource_service('verification_results')
    # Flag the zip job as in progress before the heavy work starts.
    item = vppzip_service.find_one(_id=result_id, req=None)
    vppzip_service.system_update(result_id, {'status': "processing"}, item)
    items = list(archive_service.get_from_mongo(
        req=ParsedRequest(), lookup={'_id': {'$in': items_ids}}))
    # Map verification-result id -> result document for quick lookup.
    verification_ids = [item['verification']['results'] for item in items]
    verification_results = {
        result['_id']: result
        for result in list(results_service.get_from_mongo(
            req=ParsedRequest(), lookup={'_id': {'$in': verification_ids}})
        )
    }
    verification_data_object = StringIO()
    verification_data = {}
    zip_file_object = BytesIO()
    zip_file = zipfile.ZipFile(zip_file_object, mode='w')
    for item in items:
        item_id = item['_id']
        # Store the original image bytes under the item's id.
        image = get_original_image(item, 'archive')[1]
        zip_file.writestr(item_id, image)
        # Inline the verification result, stripping Eve bookkeeping fields.
        item['verification']['results'] = verification_results[
            item['verification']['results']
        ]
        for field in ['_id', '_etag', '_created', '_updated']:
            del item['verification']['results'][field]
        verification_data[item_id] = item['verification']
    json.dump(verification_data, verification_data_object)
    zip_file.writestr('verification.json',
                      verification_data_object.getvalue())
    zip_file.close()
    uploaded_zip_id = app.media.put(
        zip_file_object.getvalue(),
        # NOTE(review): Python's ternary binds looser than `or`, so this
        # evaluates as (slugline or 'image') when exactly one item,
        # else 'images' — confirm that is the intended reading.
        filename="{name}_{date}.zip".format(
            name=items[0].get('slugline', None) or 'image'
            if len(items) == 1 else 'images',
            date=datetime.now().isoformat()
        ),
        content_type='application/zip',
        resource=vppzip_service.datasource,
        metadata={}
    )
    uploaded_zip_url = url_for_media(uploaded_zip_id)
    # Re-fetch for a fresh etag before the final status update.
    item = vppzip_service.find_one(_id=result_id, req=None)
    vppzip_service.system_update(result_id, {
        "status": "done",
        "result": uploaded_zip_url,
        "result_id": uploaded_zip_id
    }, item)
    push_notification(
        'verifiedpixel_zip:ready',
        id=str(result_id),
        url=uploaded_zip_url
    )
def post(self):
    """Mark the task chosen in the form as COMPLETED and persist the list."""
    chosen = flask.request.form['Tasks']
    # BUG FIX: json.load/json.dump work with str, so the todo file must be
    # opened in TEXT mode — json.dump to a 'wb' handle raises TypeError on
    # Python 3.
    with open('todo.json', 'r') as fp:
        todoList = dict(json.load(fp))
    todoList[chosen] = "COMPLETED"
    with open('todo.json', 'w') as fp:
        json.dump(todoList, fp)
    return flask.redirect(flask.url_for('index'))
def set_macro_definition(macro_json, macro_name=None):
    """Write the macro definition keyed by its dataset name to
    MACRO_CONF_DIR/<macro_name or ds_name>.json.
    """
    setup_macro_conf_dir()
    # BUG FIX: dict.keys() is a view on Python 3 and does not support
    # indexing; next(iter(...)) yields the first key on both 2 and 3.
    ds_name = next(iter(macro_json))
    if macro_name is None:
        macro_name = ds_name
    macro_fn = "{}.json".format(macro_name)
    with open(os.path.join(MACRO_CONF_DIR, macro_fn), 'w') as f:
        json.dump(macro_json[ds_name], f)
def post(self):
    """Add a new task with its due date to the todo list and persist it."""
    # BUG FIX: json.load/json.dump work with str, so the todo file must be
    # opened in TEXT mode — json.dump to a 'wb' handle raises TypeError on
    # Python 3.
    with open('todo.json', 'r') as fp:
        todoList = dict(json.load(fp))
    task = flask.request.form['description']
    dateToFinish = flask.request.form['finishBy']
    todoList[task] = dateToFinish
    with open('todo.json', 'w') as fp:
        json.dump(todoList, fp)
    flask.flash("Your task has been successfully added! Redirecting...")
    return flask.redirect(flask.url_for('addTask'))
def putLocalData(self, data, prefix):
    """Write *data* as pretty-printed JSON under the local data directory.

    :param data: dict with at least a 'name' key used in the file name
    :param prefix: path fragment inserted between the data dir and name
    :return: the path written
    :raises AssertionError: if opening or serializing fails
    """
    localPath = self.__localDataDir + prefix + data['name']
    try:
        # writes back in pretty printed json form; the with-block closes
        # the handle even on failure (the original leaked it then).
        with open(localPath, 'w') as f:
            json.dump(data, f, indent=4, sort_keys=False)
    except Exception as e:
        # BUG FIX: `assert(False), str(e)` is stripped under `python -O`,
        # silently swallowing errors. Raise explicitly, preserving the
        # AssertionError type callers may already catch.
        raise AssertionError(str(e))
    return localPath
def save_job(data):
    """Persist a job definition, refusing when its app is not ready.

    :raises ConfigError: if the job's app status is anything but 'ready'.
    """
    # make sure the app is ready
    app = get_app(data['app_name'])
    if app['status'] != 'ready':
        raise ConfigError("The App '%s' is not ready" % data['app_name'])
    destination = utils.job_json_path(data['app_name'], data['slug'])
    with open(destination, 'w') as fp:
        json.dump(data, fp)
def status():
    """Recompute site status and write the popular/latest JSON files
    into the repository directory.
    """
    from scripts.status import calculate
    data = calculate()
    repo = os.path.join(app.config['WWW_ROOT'], 'repository')
    # Each status section goes to its own <key>.json file.
    for key in ('popular', 'latest'):
        with open(os.path.join(repo, key + '.json'), 'w') as f:
            json.dump(data[key], f)
def write(self, repository, codename, component, arch, package, version,
          data):
    """Write *data* to the on-disk cache entry for the given coordinates,
    creating parent directories as needed. Logs and re-raises on failure.
    """
    cache_file_path = self.cache_path(repository, codename, component,
                                      arch, package, version)
    try:
        cache_dir = os.path.dirname(cache_file_path)
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        # BUG FIX: json.dump(data, open(...)) leaked the file handle;
        # the context manager guarantees it is closed.
        with open(cache_file_path, "w") as f:
            json.dump(data, f)
    except Exception as e:
        app.logger.warn("Unable to write cache %s to disk" % cache_file_path)
        app.logger.warn(e)
        raise
def save_config(self):
    """Serialize settings, the secret key, and known lights to ISYEcho.json.

    Output is sorted, indented JSON followed by a trailing newline.
    """
    config = self.settings.copy()
    # b64encode returns bytes on Python 3; decode so the value is JSON-serializable.
    config['SecretKey'] = base64.b64encode(self._secret_key).decode('ascii')
    if len(self._lights) > 0:
        config['lights'] = []
        # Sorted for a stable, diff-friendly file.
        for light_address in sorted(self._lights):
            config['lights'].append(self._lights[light_address].serialize())
    # `with` closes the handle even if serialization fails mid-write.
    with open(os.path.join(self._config_path, 'ISYEcho.json'), 'w') as config_fp:
        json.dump(config, config_fp, indent=4, separators=(',', ': '), sort_keys=True)
        config_fp.write('\n')
def writeComputingResources(self, filterd_executables, outputFolder):
    """Dump per-command cores/ram/walltime requirements to <outputFolder>/resources.txt as JSON.

    Entries that are not StubInfo instances are skipped.
    """
    resources = {}
    for stub in filterd_executables:
        if isinstance(stub, StubInfo):
            # One dict literal instead of repeated update() calls.
            resources[stub.command] = {
                "cores": stub.cores,
                "ram": stub.ram,
                "walltime": stub.walltime,
            }
    with open(os.path.join(outputFolder, 'resources.txt'), 'w') as outfile:
        json.dump(resources, outfile)
def save_result(result_id, data):
    '''Save the given data for the result with the provided id.'''
    # Keep only the known FIELDS; absent keys become None.
    record = {}
    for field in FIELDS:
        record[field] = data.get(field)
    target = os.path.join(DATA_DIR, "{}.txt".format(result_id))
    try:
        with open(target, 'w') as fh:
            json.dump(record, fh)
    except EnvironmentError:
        return {'status': 500, 'error': "Internal Server Error while saving."}
    return {'status': 200}
def build_gbk(path):
    """Assemble a GenBank file from the configured track files and return it as a download.

    :raises ValueError: when the path configuration lists no track paths
    """
    pathconfig = build_config(path)
    out_path = _new_gbk_file_name(pathconfig)
    track_paths = pathconfig.get('trackPaths')
    if not track_paths:
        # Include the offending config in the error for easier debugging.
        buf = StringIO()
        json.dump(pathconfig, buf)
        raise ValueError("No tracks provided for building GBK: " + buf.getvalue())
    combined = genbank.combine_track_files(track_paths.split(','), root=_upload_root())
    genbank.track_json_to_gbk(pathconfig['filename'], out_path, combined)
    return send_file(out_path, as_attachment=True)
def signUp():
    """Validate the posted sign-up form fields and return a JSON status payload."""
    # create user code will be here!!
    # read the posted values from the UI
    _name = request.form['inputName']
    _email = request.form['inputEmail']
    _password = request.form['inputPassword']
    # validate the received values
    if _name and _email and _password:
        # json.dumps (returns a string), not json.dump (requires a file object
        # second argument — the original call raised TypeError).
        return json.dumps({'html': '<span>All fields good !!</span>'})
    else:
        return json.dumps({'html': '<span>Enter the required fields</span>'})
def api_save():
    """Persist a JSON POST body to data/app1.json and acknowledge with a JSON response."""
    # Guard clause: reject anything that is not declared as JSON.
    if request.headers['Content-Type'] != 'application/json':
        return "415 Unsupported Media Type ;)"
    print("Got POST data " + json.dumps(request.json))
    print("Dir name: " + os.path.dirname(os.path.realpath(__file__)))
    with open('data/app1.json', 'w') as outfile:
        json.dump(request.json, outfile)
    payload = json.dumps({'response': 'true', 'name': 'vinay'})
    return Response(payload, status=200, mimetype='application/json')
def industryGraph(industry):
    """Build the relation graph for *industry*, cache it to disk, and return it as JSON.

    Requires a 'subgraphs' query argument; otherwise returns an error string.
    """
    print(industry)  # print() call works on both Python 2 and 3 (statement form is Py2-only)
    if 'subgraphs' in request.args:
        subgraphs = int(request.args["subgraphs"])
        industry = industry.replace("_", " ")
        response = gpy.main(industry, conn, subgraphs)
        # Cache the computed graph, keyed by industry and subgraph count.
        # (Renamed from `file`, which shadowed the builtin.)
        file_name = industry.replace(" ", "_") + "_" + str(subgraphs) + ".json"
        with open(file_name, "w") as f:  # text mode: json.dump writes str on Python 3
            json.dump(response, f)
        return json.dumps(response, ensure_ascii=False)
    else:
        return "PLEASE ADD AN INDUSTRY ARGUMENT"
def write_json_file(info, path):
    """Write *info* to <path>/info.json (ASCII-escaped, indented) and return it unchanged."""
    local('touch %s/info.json' % path)
    # FIXME
    # With ensure_ascii=False: fine under werkzeug and Gunicorn, but crashes Foreman
    # (and maybe Supervisor too, as with the accents in the Stream).
    # With ensure_ascii=True: fine under Foreman, but the info.json encoding is ugly.
    # Possibly a bug similar to this Fabric issue: <https://github.com/fabric/fabric/issues/815>
    with open("%s/info.json" % path, 'w') as f:
        json.dump(info, f, indent=4, ensure_ascii=True)
    # Removed the no-op `f.closed` expression: `with` already closes the file.
    return info
def _save_permission(self, permission):
    """Save permission to file.

    Parses the permission against self.resource, writes it as UTF-8 JSON to
    self.permission_json, and caches both on the instance on success.

    :raises IOError: re-raised with an annotated strerror on write failure
    :raises ValueError: when parse_config's validation (assertions) fails
    """
    try:
        config = parse_config(self.resource, permission)
        with codecs.open(self.permission_json, "w", encoding="utf-8") as f:
            json.dump(permission, f, indent=4, ensure_ascii=False, sort_keys=True)
        self.permission = permission
        self.config = config
    except IOError as ex:
        ex.strerror = "can't save permission to file: %s" % ex.strerror
        raise
    except AssertionError as ex:
        # BaseException.message was removed in Python 3; str(ex) is portable
        # and carries the same text.
        raise ValueError(str(ex))
def save_asset(self, name = None, data = None, **kwarg):
    """ Save static files into out folder.

    'description' is written verbatim; 'metadata' is round-tripped through
    json for normalized, indented output. Passing del_new=True also removes
    a stale 'metadata_new' asset if present.
    """
    if name == 'description':
        # `with` closes the handle even if the write raises (the original
        # leaked it in that case).
        with open(self.asset_by_name(name), 'w') as f:
            f.write(data)
    elif name == 'metadata':
        with open(self.asset_by_name(name), 'w') as f:
            json.dump(json.loads(data), f, indent=2, ensure_ascii=True)  # same params as in generatemetadata.py
    # NOTE(review): in the original this cleanup's nesting is ambiguous;
    # kept at method level — confirm it should not be metadata-only.
    if kwarg.get('del_new') and kwarg['del_new']:
        if os.path.exists(self.asset_by_name('metadata_new')):
            os.remove(self.asset_by_name('metadata_new'))
def zip_items(result_id, items_ids):
    """Bundle the given archive items and their verification results into a zip.

    Marks the verifiedpixel_zip record *result_id* as 'processing', builds an
    in-memory zip containing each item's original image plus a
    verification.json manifest, uploads the zip to app.media, and finally
    patches the record to 'done' with the upload's URL and id.
    """
    archive_service = get_resource_service('archive')
    vppzip_service = get_resource_service('verifiedpixel_zip')
    results_service = get_resource_service('verification_results')
    # Flag the job as in-flight before doing any heavy work.
    vppzip_service.patch(
        result_id,
        {'status': "processing"},
    )
    items = list(archive_service.get_from_mongo(
        req=ParsedRequest(),
        lookup={'_id': {'$in': items_ids}}))
    # Each item stores only the id of its verification result; fetch all of
    # them in one query and index by _id for the loop below.
    verification_ids = [item['verification']['results'] for item in items]
    verification_results = {
        result['_id']: result for result in
        list(results_service.get_from_mongo(
            req=ParsedRequest(),
            lookup={'_id': {'$in': verification_ids}})
        )
    }
    verification_data_object = StringIO()
    verification_data = {}
    # Assemble the archive fully in memory; nothing touches the filesystem.
    zip_file_object = BytesIO()
    zip_file = zipfile.ZipFile(zip_file_object, mode='w')
    for item in items:
        item_id = item['_id']
        image = get_original_image(item)[1]
        zip_file.writestr(item_id, image)
        # Inline the fetched result in place of its id, dropping internal
        # Eve bookkeeping fields before export.
        item['verification']['results'] = verification_results[
            item['verification']['results']
        ]
        for field in ['_id', '_etag', '_created', '_updated']:
            del item['verification']['results'][field]
        verification_data[item_id] = item['verification']
    json.dump(verification_data, verification_data_object)
    zip_file.writestr('verification.json', verification_data_object.getvalue())
    zip_file.close()
    uploaded_zip_id = app.media.put(
        zip_file_object.getvalue(), filename=str(items_ids),
        content_type='application/zip',
        resource=vppzip_service.datasource, metadata={}
    )
    uploaded_zip_url = url_for_media(uploaded_zip_id)
    # Publish the finished artifact's location on the original record.
    vppzip_service.patch(result_id, {
        "status": "done",
        "result": uploaded_zip_url,
        "result_id": uploaded_zip_id
    })
def convert_timezones():
    """Build a deduplicated timezone catalog and write it to timezones.json.

    For every pytz zone, derives a short key (either the current tzname, or
    'STD/DST' when the zone has an upcoming DST transition) and a display
    name via babel; zones whose display name was already seen are skipped.
    """
    timezones = {}
    found = set()
    today = datetime.utcnow()
    for zone_name in sorted(pytz.all_timezones):
        tzinfo = dates.get_timezone(zone_name)
        if tzinfo is None:
            continue
        short_name = zone_name
        try:
            transition = dates.get_next_timezone_transition(tzinfo, today)
        except TypeError:
            # Some tzinfo objects can't report transitions; skip them.
            continue
        if transition is None:
            # No DST ahead: key is simply the zone's current abbreviation.
            key = tzinfo.tzname(today)
            has_dst = False  # NOTE(review): never read afterwards — dead assignment?
            name = dates.get_timezone_name(tzinfo)
        else:
            from_tz = transition.from_tz
            to_tz = transition.to_tz
            from_tzinfo = transition.from_tzinfo
            to_tzinfo = transition.to_tzinfo
            # Decide which side of the transition is standard vs daylight
            # time by checking whether DST is active today.
            if transition.from_tzinfo.localize(today).dst():
                dst_tz = from_tz
                std_tz = to_tz
                dst_tzinfo = from_tzinfo
                std_tzinfo = to_tzinfo
            else:
                dst_tz = to_tz
                std_tz = from_tz
                dst_tzinfo = to_tzinfo
                std_tzinfo = from_tzinfo
            # dst_tzinfo is assigned but unused below — presumably leftover;
            # only the names feed the key/display name.
            key = '%s/%s' % (std_tz, dst_tz)
            name = dates.get_timezone_name(std_tzinfo, zone_variation='generic')
        # Deduplicate by display name: keep the first zone that produced it.
        if name in found:
            continue
        found.add(name)
        timezones[short_name] = {
            'short': key,
            'name': name
        }
    with open('timezones.json', 'w') as f:
        json.dump({'timezones': timezones}, f, indent=2)
def chronicle_result(result_id, timestamp):
    '''Chronicle the result to be processed by the grader.'''
    chronicle = fetch_chronicled_results()
    # Propagate a fetch failure straight back to the caller.
    if 'error' in chronicle:
        return chronicle
    chronicle['chronicle'][result_id] = timestamp
    target = os.path.join(DATA_DIR, 'grading.chronicle')
    try:
        with open(target, 'w') as fh:
            json.dump(chronicle, fh)
    except EnvironmentError:
        return {'status': 500, 'error': 'Internal Server Error while adding.'}
    return {'status': 200}
def post(self):
    """Delete the task at the 1-based position submitted as 'taskChoice', then redirect."""
    # Text mode ('r'/'w'), not binary: json.load/json.dump work with str on Python 3.
    with open('todo.json', 'r') as fp:
        todoList = dict(json.load(fp))
    taskChoice = int(flask.request.form['taskChoice'])
    taskChoice = taskChoice - 1
    # list() is required: dict.keys() is a non-indexable view on Python 3.
    keys = list(todoList.keys())
    try:
        key = keys[taskChoice]
        del todoList[key]
        with open('todo.json', 'w') as fp:
            json.dump(todoList, fp)
        flask.flash('Removed.')
    except IndexError:
        # Out-of-range choice (the only exception the indexing can raise);
        # fall through and redirect without modifying the list.
        key = 'Please choose one of the options below.'
    return flask.redirect(flask.url_for('deleteTask'))