def main():
    """Read the JSON files named on argv, expand their records into command
    combinations, and hand the dynamically generated tests to unittest.

    Fixes: the loop variable shadowed the builtin `input`, and the file
    handles passed straight to jsonLoad were never closed.
    """
    jsonInputs = iter(argv)
    next(jsonInputs)  # skip the program name (argv[0])
    INPUTS = []  # accumulated raw input records from every file
    for json_path in jsonInputs:  # remaining argv entries are json files
        print("Reading " + json_path + " ...")
        assert ".json" in json_path, "Must input json files!"
        with open(json_path) as json_file:  # close each handle deterministically
            data = jsonLoad(json_file)
        INPUTS.extend(data)
        print("Found " + str(len(data)) + " inputs.")
    print("\n" + str(len(INPUTS)) + " total inputs found")
    INPUTS = [BaseCommand(x) for x in INPUTS if x]
    # unittest.main() gets mad if you pass in argv, so drop everything but argv[0]
    del argv[1:]
    COMBINATIONS = getCombinations(INPUTS)
    print(str(len(INPUTS)) + " of the inputs generated " + str(len(COMBINATIONS)) + " combinations.")
    INPUTS = []  # free dat ram
    for x in COMBINATIONS:
        # Actually dynamically creates the unit tests
        generated = dynamicTestingGenerator(x)
        addCommand(generated)
    unittest.main()
def load():
    """Open a file picker, parse the chosen JSON scene file, and rebuild the
    global Point/Stick collections from it."""
    root = Tk()
    root.withdraw()  # hide the empty Tk root window; only the dialog is shown
    root.filename = filedialog.askopenfilename(title="choose your file", filetypes=[("JSON files", ".json"), ("All files", "*")])
    pbackup = None  # snapshot of points (minus points[0]) taken before the reload
    sbackup = None  # snapshot of sticks taken before the reload
    with open(root.filename, 'r') as myfile:
        data = myfile.read()
    try:
        # NOTE(review): `data` is a str; json.load expects a file object, so
        # jsonLoad presumably aliases json.loads here -- confirm at the import site.
        d = jsonLoad(data)
        pbackup = Point.points[1:]
        sbackup = Stick.sticks[:]
        # points[0] is preserved; presumably a fixed/anchor point -- verify
        del Point.points[1:]
        del Stick.sticks[:]
        for p in d['points']:
            a = Point(Vector2(p['pos']['x'], p['pos']['y']), p['size'], p['pinned'])
            if p['isMotor']:
                a.motorize(Vector2(p['spinPos']['x'], p['spinPos']['y']), p['spinSpeed'], p['currentSpin'])
                a.currentSpin = p['currentSpin']
        for s in d['lines']:
            # Sticks reference points by index into the freshly loaded list
            b = Stick(Point.points[s['p1']], Point.points[s['p2']], s['visible'])
    except Exception:
        messagebox.showinfo("Error", "Invalid JSON")
    if pbackup and sbackup:
        # NOTE(review): the snapshots are re-appended even after a successful
        # load (this is not limited to the except branch) -- confirm intended.
        Point.points += pbackup
        Stick.sticks += sbackup
def load_json(input_file):
    """Load *input_file* as JSON, dropping any top-level 'comment' key.

    Fixes: `except KeyError, e:` is Python 2 syntax and a SyntaxError under
    Python 3 -- `dict.pop` with a default removes the key without raising.
    Also returns the parsed data, which the original built and then discarded.
    """
    with open(input_file) as data_file:
        data = jsonLoad(data_file)
    data.pop('comment', None)
    return data
def load():
    """Open a file picker, parse the chosen JSON scene file, and rebuild the
    global Point/Stick collections from it."""
    root = Tk()
    root.withdraw()  # hide the empty Tk root window; only the dialog is shown
    root.filename = filedialog.askopenfilename(title = "choose your file", filetypes = [("JSON files", ".json"),("All files", "*")])
    pbackup = None  # snapshot of points (minus points[0]) taken before the reload
    sbackup = None  # snapshot of sticks taken before the reload
    with open(root.filename, 'r') as myfile:
        data = myfile.read()
    try:
        # NOTE(review): `data` is a str; json.load expects a file object, so
        # jsonLoad presumably aliases json.loads here -- confirm at the import site.
        d = jsonLoad(data)
        pbackup = Point.points[1:]
        sbackup = Stick.sticks[:]
        # points[0] is preserved; presumably a fixed/anchor point -- verify
        del Point.points[1:]
        del Stick.sticks[:]
        for p in d['points']:
            a = Point(Vector2(p['pos']['x'], p['pos']['y']), p['size'], p['pinned'])
            if p['isMotor']:
                a.motorize(Vector2(p['spinPos']['x'], p['spinPos']['y']), p['spinSpeed'], p['currentSpin'])
                a.currentSpin = p['currentSpin']
        for s in d['lines']:
            # Sticks reference points by index into the freshly loaded list
            b = Stick(Point.points[s['p1']], Point.points[s['p2']], s['visible'])
    except Exception:
        messagebox.showinfo("Error", "Invalid JSON")
    if pbackup and sbackup:
        # NOTE(review): the snapshots are re-appended even after a successful
        # load (this is not limited to the except branch) -- confirm intended.
        Point.points += pbackup
        Stick.sticks += sbackup
def plot_random_faces(dataSetPath, jsonLabelsPath, numberToDisplay):
    """Show `numberToDisplay` random labelled face images in one figure.

    For each slot: pick a random subdirectory of `dataSetPath`, pick a random
    .jpg inside it, and overlay the (x, y) label point read from the JSON
    label file (keyed by directory name, then file name).
    """
    fig = plt.figure(figsize=(20, 20))
    colums = 4
    # Fix: rows was floor(n / 5) + 1 while the grid is `colums` (4) wide, so
    # for some counts (e.g. 19) rows * colums < numberToDisplay and
    # add_subplot failed. Derive rows from the actual column count.
    rows = floor(numberToDisplay / colums) + 1
    with open(jsonLabelsPath) as jsonLabelFile:
        jsonData = jsonLoad(jsonLabelFile)
    # Renamed comprehension variables: `dir`/`file` shadowed builtins.
    dirs = [d for d in listdir(dataSetPath) if path.isdir(path.join(dataSetPath, d))]
    for i in range(numberToDisplay):
        # getting random dir for dictionary
        randomDir = randomChoice(dirs)
        dirPath = path.join(dataSetPath, randomDir)
        # getting files from randomed dir, then getting random file
        files = [f for f in listdir(dirPath) if path.isfile(path.join(dirPath, f)) and f.endswith(".jpg")]
        randomFile = randomChoice(files)
        img = plt.imread(path.join(dirPath, randomFile))
        fig.add_subplot(rows, colums, i + 1).axis("Off")
        plt.imshow(img)
        # getting correct label for randomed file
        fx = jsonData[randomDir][randomFile]["x"]
        fy = jsonData[randomDir][randomFile]["y"]
        plt.scatter(x=[fx], y=[fy], c="r", s=5)
    plt.show()
def parseJson(file):
    """Print the name and city of every person aged 30 or over in the file."""
    with open(file, 'r') as handle:
        document = jsonLoad(handle)
    for person in document["people"]:
        if person["age"] < 30:
            continue
        print("Name: {name}, City: {city}".format(name=person["name"], city=person["city"]))
def _setupSystemLogging(self):
    """Configure the logging subsystem from the packaged JSON config file."""
    loggingConfigPath: str = self._retrieveResourcePath(TravisCli.JSON_LOGGING_CONFIG_FILENAME)
    with open(loggingConfigPath, 'r') as configFile:
        config.dictConfig(jsonLoad(configFile))
    # Do not record process/thread ids in log records
    logging.logProcesses = False
    logging.logThreads = False
def setUpLogging(cls):
    """Configure logging for the test run from the JSON logging config file."""
    loggingConfigPath: str = TestBase.retrieveResourcePath(
        TestBase.JSON_LOGGING_CONFIG_FILENAME)
    with open(loggingConfigPath, 'r') as configFile:
        config.dictConfig(jsonLoad(configFile))
    # Do not record process/thread ids in log records
    logging.logProcesses = False
    logging.logThreads = False
def setupSystemLogging():
    """Configure the root logging system from the resource JSON config."""
    loggingConfigPath: str = LocateResources.getResourcesPath(
        resourcePackageName=LocateResources.RESOURCES_PACKAGE_NAME,
        bareFileName=LocateResources.JSON_LOGGING_CONFIG_FILENAME)
    with open(loggingConfigPath, 'r') as configFile:
        logging.config.dictConfig(jsonLoad(configFile))
    # Do not record process/thread ids in log records
    logging.logProcesses = False
    logging.logThreads = False
def check(keyPath, commitPath):
    # Verify that the HMAC stored in the commitment file matches the HMAC
    # recomputed from the key file's secret value.
    # NOTE(review): Python 2 code (print statements, file(), str.decode('hex'));
    # it will not run under Python 3 -- kept as-is.
    # load the commitment and keys from file
    commitment = jsonLoad(file(commitPath, 'r'))
    secret = jsonLoad(file(keyPath, 'r'))
    # Load the key, message and HMAC
    key = secret["key"].decode('hex')  # hex string -> raw bytes
    authCode = commitment["hmac"].decode('hex')
    # The commitment carries a format template; the secret value fills it in
    message = commitment["format"].format(secret["value"])
    print "Testing message:\n {}".format(message)
    print "Given key: {}".format(secret["key"])
    print "Commit HMAC: {}".format(commitment["hmac"])
    # hmac() is project-local; presumably returns the raw digest -- verify
    testCode = hmac(message, key)
    print "Calculated HMAC: {}".format(testCode.encode('hex'))
    if testCode == authCode:
        print "Message check PASSED"
    else:
        print "Message check FAILED"
def buildAmmoMap():
    """Invert bowgun_ammo_map.json (ammo key -> weapon list) into
    name_ammo_map.json (weapon name -> ammo key).

    Apostrophes are stripped from weapon names before they become keys.
    Fix: the input file handle was bound to `input`, shadowing the builtin.
    """
    source_path = "./source_data/weapons/bowgun_ammo_map.json"
    output_path = "./source_data/weapons/name_ammo_map.json"
    reverse_map = {}
    with open(source_path, 'r') as source_file:
        jobj = jsonLoad(source_file)
    for ammo_key, weapons in jobj.items():
        for weapon in weapons:
            reverse_map[weapon.replace('\'', '')] = ammo_key
    with open(output_path, 'w') as out_file:
        dump(reverse_map, out_file, separators=(',\n', ':'))
def getConstants(id):
    """Return the settings group stored under *id* in settings.json.

    Raises ValueError when the file is unreadable or the id is absent.
    Fixes: both bare `except:` clauses narrowed to the exceptions actually
    expected, and the file handle is now closed on every path (the original
    leaked it when the id lookup failed).
    """
    try:
        openFile = open('settings.json', 'r')
    except OSError:
        logEvent("getConstants ERROR", "settings.json not found")
        raise ValueError("settings.json does not exist or cannot be read")
    with openFile:  # guarantees close even if parsing raises
        data = jsonLoad(openFile)
    try:
        group = data[id]
    except KeyError:
        logEvent("getConstants ERROR", str(id) + " does not exist")
        raise ValueError(str(id) + " does not exist in settings.json")
    return group
def json_to_df(json_path, directory):
    """Load a nested JSON file and reshape it into a DataFrame whose index is
    built by prefixing *directory* onto the flattened key tuples."""
    with open(json_path, "r") as source:
        payload = jsonLoad(source)
    frame = io.json.json_normalize(payload)
    # Split the dotted column names into a two-level MultiIndex, then stack
    # both levels down into the row index.
    frame.columns = MultiIndex.from_tuples([name.split(".") for name in frame.columns])
    frame = frame.stack(level=[0, 1])
    frame = frame.set_index(frame.index.droplevel(0))
    joined = Index([path.sep.join([directory] + list(key)) for key in frame.index.values])
    return frame.set_index(joined)
def encJson2dict(fname):
    """Read *fname* as JSON and wrap the outcome in a status dict.

    Returns {"error": False, "message": <parsed object>} on success, or
    {"error": True, "message": <description>} on open/parse failure.

    Fix: when open() itself failed, the original's finally-clause still ran
    `db.close()` on the never-assigned `db`, raising UnboundLocalError
    instead of returning the error dict.
    """
    try:
        db = open(fname, "r")
    except IOError:
        json = 'ERROR: Unable to open file ' + fname
        return {"error": True, "message": json}
    try:
        json = jsonLoad(db)
    except ValueError as e:
        # json.JSONDecodeError subclasses ValueError, so malformed input lands here
        json = 'ERROR: Json format error ' + str(fname) + ' --> ' + str(e)
        res = {"error": True, "message": json}
    else:
        res = {"error": False, "message": json}
    finally:
        db.close()
    return (res)
def AppendGlossaryEntriesToFile(fileName):
    """Prompt for a word/definition pair and merge it into the JSON glossary
    stored at *fileName* (rewritten sorted and indented).

    Fixes: the original opened the undefined name `filename` instead of the
    `fileName` parameter, and never truncated after seek(0), so a shorter
    rewrite could leave stale bytes at the end of the file.
    """
    try:
        newKey = input("Enter a word you'd like to define:\n")
        newValue = input("Enter the definition:\n")
        newEntry = {newKey: newValue}
        with open(fileName, "r+") as append_file:
            finalEntry = jsonLoad(append_file)
            finalEntry.update(newEntry)
            append_file.seek(0)
            jsonDump(finalEntry, append_file, indent=4, sort_keys=True)
            append_file.truncate()  # drop any leftover tail from the old content
    except Exception as ex:
        print("An error occured--------------------")
        print(ex)
    else:
        print("Entry added")
def build():
    """Assemble the SQL script (CREATE TABLE + insertions) for every table
    marked active in the build config file."""
    assert pathExists(config_path), "build config file missing!"
    with open(config_path, 'r') as cfg_file:
        config = jsonLoad(cfg_file)
    statements = []
    for table_name in config:
        table_info = config[table_name]
        if not table_info["active"]:
            continue
        statements.append("\n" + createTable(table_name, table_info))
        statements.append("\n" + insertionsFromCSV(table_name, table_info))
    return "".join(statements)
def processWeaponImageMap():
    """Derive snake_case .png image names from the weapon names in
    weapon_image_map.json and write the pairs to test_map.json.

    Words consisting solely of 'I' characters (roman numerals such as
    "II"/"III") are dropped before lower-casing and joining with '_'.
    """
    source_path = "./source_data/weapons/weapon_image_map.json"
    target_path = "./source_data/weapons/test_map.json"
    with open(source_path, 'r') as source_file:
        weapon_map = jsonLoad(source_file)
    entries = []
    for weapon_name in weapon_map:
        kept = [w.lower() for w in weapon_name.split() if len(w) != w.count('I')]
        image_name = '_'.join(kept) + ".png"
        print(weapon_name, image_name)
        entries.append("\"%s\": \"%s\",\n" % (weapon_name, image_name))
    with open(target_path, 'w') as target_file:
        target_file.write("{\n")
        target_file.writelines(entries)
        target_file.write("\n}")
def loadSettings(self, fileName="/Users/footballnerd12/Desktop/Python Crash Course/Python Crash Course/alienInvasion/alienInvasionSettings.json"):
    """Load the game settings JSON and copy each value onto `self`.

    Generalization: `fileName` is now a parameter defaulting to the original
    hard-coded absolute path, so the settings location is configurable (and
    testable) without changing existing callers.
    """
    with open(fileName, "r") as settings:
        contents = jsonLoad(settings)
    # One attribute per JSON key; replaces 16 hand-written assignments.
    for key in ("bulletSpeed", "shipSpeed", "alienSpeed", "alienScore",
                "nextLevelSpeedFactor", "scoreIncreaseFactor", "screenWidth",
                "screenHeight", "backgroundColor", "shipLimit", "bulletWidth",
                "bulletHeight", "bulletColor", "maxBullets", "fleetDropSpeed",
                "fleetDirection"):
        setattr(self, key, contents[key])
from librosa import load as loadSong from json import load as jsonLoad from feature_extraction import extract_features from compare import compare originalInfoPath = '/home/luisfmgs/Documents/tcc/original.json' vocalCoverInfoPath = '/home/luisfmgs/Documents/tcc/vocal.json' instrumentalCoverInfoPath = '/home/luisfmgs/Documents/tcc/instrumental.json' with open(originalInfoPath) as data: originalInfo = jsonLoad(data) with open(vocalCoverInfoPath) as data: vocalInfo = jsonLoad(data) with open(instrumentalCoverInfoPath) as data: instrumentalInfo = jsonLoad(data) original_features = [] for inforiginal in originalInfo: original, sr_original = loadSong(path=inforiginal['path'], sr=None) original_features.append(extract_features(original, sr_original)) print('vocal cover') vocalhits = 0 for infocover in vocalInfo: vocalCover, sr_vocalCover = loadSong(path=infocover['path'], sr=None) minscore = 0 originalID = 0
def ReadGlossaryFromFile(fileName):
    """Print every "key : value" pair from the JSON glossary at *fileName*.

    Fixes: the original opened the undefined name `filename` instead of the
    `fileName` parameter (NameError on every call), and bound the parsed dict
    to `file`, shadowing that builtin.
    """
    with open(fileName, "r") as read_file:
        glossary = jsonLoad(read_file)
    for key, value in glossary.items():
        print(f"{key} : {value}")
def load_json(file_name):
    """Load *file_name* as JSON, preserving key order in an OrderedDict.

    Fix: dropped the original's `file_dict = OrderedDict()` pre-assignment,
    a dead store that was always overwritten by the load.
    """
    with open(file_name, newline="") as json_file:
        file_dict = jsonLoad(json_file, object_pairs_hook=OrderedDict)
    return file_dict
def readJsonFile(jsonFile):
    """Parse *jsonFile* as JSON into an ordered mapping via `od`.

    Fix: the original passed open()'s result straight into jsonLoad and never
    closed it; the context manager guarantees the handle is released.
    """
    with open(jsonFile, mode='r') as handle:
        jsonData = od(jsonLoad(handle))
    return jsonData
from requests import get as getRequest
from json import load as jsonLoad

# Request/CSV configuration, loaded once at import time.
CONFIG = jsonLoad(open('./filesConfig/handle_breeds.json'))


def get_breeds():
    """Fetch the breed listing from the configured endpoint as parsed JSON."""
    return getRequest(**CONFIG["getBreeds"]).json()


def process_breeds(breeds=None):
    """Flatten the breeds payload into CSV rows, header row first.

    A breed with sub-breeds yields one [breed, subBreed] row per sub-breed;
    a breed without sub-breeds yields a single [breed, ""] row.
    """
    if not breeds:
        raise Exception('No breeds to process')
    rows = [CONFIG["processBreeds"]["header"]]
    for breed, sub_breeds in breeds["message"].items():
        if len(sub_breeds) > 0:
            for sub_breed in sub_breeds:
                rows.append([breed, sub_breed])
        else:
            rows.append([breed, ""])
    return rows
def readJSONFile(filename, canDie=True):
    """Open *filename* via openFileForReading and parse its contents as JSON."""
    handle = openFileForReading(filename, canDie)
    return jsonLoad(handle)
import logging from json import load as jsonLoad from json import dumps # from json import dumps from signal import SIGTERM, signal from bottle import Bottle, request, response, run, static_file from webservices.auth.Auth import requires_permission, GET_with_auth from webservices.files.Files import get_static_file, get_directory_listing, get_path_type with open('config.json') as data_file: CONFIG = jsonLoad(data_file) with open('configs/database/sqlite.db.config.json') as dbConfigFile: db_config = jsonLoad(dbConfigFile) HOST = CONFIG['host'] PORT = CONFIG['port'] RELOADER = CONFIG['reloader'] DEBUG = CONFIG['debug'] FILE_DIR = CONFIG['file_dir'] def get_loggers(): logger = logging.getLogger("file_server") logger.setLevel(logging.DEBUG) stream_handler = logging.StreamHandler() stream_handler.setLevel(logging.DEBUG) formatter = logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s', '%Y-%m-%d %H:%M:%S')
def nitrocliPath(cwd):
    """Determine the path to the nitrocli release build binary."""
    # `cargo metadata` emits a single JSON document on stdout (bytes).
    out = check_output(["cargo", "metadata", "--format-version=1"], cwd=cwd)
    # NOTE(review): `out` is bytes, but json.load expects a file object --
    # jsonLoad presumably aliases json.loads here; confirm at the import site.
    data = jsonLoad(out)
    # Cargo reports the workspace target directory; the release binary lives under it.
    return join(data["target_directory"], "release", "nitrocli")
#el metodo dump me permite agregar #el metodo load cargar datos tupla1=('led','cars') try : lectura=open('orellyPython/chapter9_x/p18_1.pkl',mode='rb') print(pickleLoad(lectura)) except FileNotFoundError: with open('orellyPython/chapter9_x/p18_1.pkl',mode='wb') as f: pickleDump(tupla1,f) diccionario1=dict(raza='Rotwailer',nombre='Zeus') try : lectura=open('orellyPython/chapter9_x/p18_1.json',mode='r') print(jsonLoad(lectura)) lectura.close() except FileNotFoundError: with open('orellyPython/chapter9_x//p18_1.json',mode='w') as f: #LA SENTENCIA DESPUES DEL WITH puede ser declarada en fp,osea open(..) jsonDump(diccionario1,fp=f,indent=4) #print(jsonDumps(diccionario1,indent=4),file=f) #f.write(jsonDumps(diccionario1,indent=4)) #DELIMITADOR DE EXCEL ; #DELIMITADOR CSV , try : lectura=open('orellyPython/chapter9_x/p18_1.csv',mode='r') spamReader=reader(lectura, delimiter=';', quotechar='|') for row in spamReader: print(', '.join(row))
from datetime import datetime from json import load as jsonLoad from packages.handle_pokemons import get_total_pokemons, get_pokemons, proccess_pokemons from packages.handle_csv import generate_csv from os import remove as removeFile REQUESTS_CONFIG = jsonLoad(open('./filesConfig/handle_pokemons.json', 'r')) CSV_CONFIG = jsonLoad(open('./filesConfig/handle_csv.json', 'r')) def execute(full): if full: print('[WARNING] Running fully integration') else: CSV_CONFIG["openFile"]["file"] = CSV_CONFIG["openFile"]["file"].format(complement=f"{datetime.today().strftime('%Y%m%d')}-") print('[WARNING] Normal execute') try: removeFile(CSV_CONFIG["openFile"]["file"]) except Exception as e: print('[ERROR] Error in remove old file', CSV_CONFIG["openFile"]["file"]) print(e) total = 0 currentOffset = 0 try: total = get_total_pokemons(config=REQUESTS_CONFIG["getPokemons"])
import csv
from json import load as jsonLoad

# CSV writer configuration, loaded once at import time.
CONFIG = jsonLoad(open('./filesConfig/handle_csv.json'))


def generate_csv(rows=None, complement=""):
    """Write *rows* to the configured CSV file; *complement* fills the
    `{complement}` placeholder in the configured path template."""
    if not rows:
        raise Exception('No rows to process')
    # NOTE: mutates the shared CONFIG path in place, exactly as before
    # (subsequent calls format an already-resolved path, which is a no-op).
    CONFIG["openFile"]["file"] = CONFIG["openFile"]["file"].format(
        complement=complement)
    with open(**CONFIG["openFile"]) as csv_file:
        csv.writer(csv_file, **CONFIG["csvWriter"]).writerows(rows)
from json import load as jsonLoad import datetime from urllib.request import urlopen from functools import wraps from bottle import request, response from jose import jwt with open('config.json') as data_file: CONFIG = jsonLoad(data_file)["auth0"] AUTH0_CLIENT_ID = CONFIG["AUTH0_CLIENT_ID"] AUTH0_DOMAIN = CONFIG["AUTH0_DOMAIN"] jsonResp = urlopen("https://" + AUTH0_DOMAIN + "/.well-known/jwks.json") jwks = jsonLoad(jsonResp) def get_token_auth_header(): """Obtains the access token from the Authorization Header """ auth = request.headers.get("Authorization", None) cookie = request.headers.get("Cookie", None) if not auth and not cookie: return { "code": "authorization_header_missing", "description": "Authorization header is expected", "status_code": 401 } if not cookie: one_day_from_now = datetime.datetime.utcnow() + datetime.timedelta( days=1)
def nasaDataParse(file):
    """Return the "explanation" field from a NASA APOD JSON file."""
    with open(file, 'r') as source:
        payload = jsonLoad(source)
    return payload["explanation"]