import json

import json5
import yaml
from easydict import EasyDict


def load_setting(self, project_repo, base, ext):
    """Load the settings file named ``base + ext`` from ``project_repo`` into ``self._loaded``."""
    path = project_repo + base + ext
    with open(path) as f:
        data = None
        if ext in ['.yml', '.yaml']:
            # safe_load avoids executing arbitrary YAML tags
            data = yaml.safe_load(f)
        elif ext in ['.json5']:
            data = json5.load(f)
        elif ext in ['.json']:
            data = json.load(f)
    self._loaded[base] = EasyDict(data)
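# A minimal usage sketch for load_setting, assuming a holder object that
# exposes a `_loaded` dict; the project path and file name are hypothetical.
class _Settings(object):
    def __init__(self):
        self._loaded = {}

settings = _Settings()
load_setting(settings, '/path/to/project/', 'config', '.yaml')
print(settings._loaded['config'])  # EasyDict built from the parsed YAML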
def _do_parse(inp, fmt, encoding, force_types):
    """Actually parse input.

    Args:
        inp: bytes yielding file-like object
        fmt: format to use for parsing
        encoding: encoding of `inp`
        force_types:
            if `True`, integers, floats, booleans and none/null are recognized
                and returned as proper types instead of strings;
            if `False`, everything is converted to strings;
            if `None`, backend return value is used
    Returns:
        parsed `inp` (dict or list) containing unicode values
    Raises:
        various sorts of errors raised by used libraries while parsing
    """
    res = {}
    _check_lib_installed(fmt, 'parse')

    if fmt == 'ini':
        cfg = configobj.ConfigObj(inp, encoding=encoding)
        res = cfg.dict()
    elif fmt == 'json':
        if six.PY3:
            # python 3 json only reads from unicode objects
            inp = io.TextIOWrapper(inp, encoding=encoding)
        res = json.load(inp, encoding=encoding)
    elif fmt == 'json5':
        if six.PY3:
            inp = io.TextIOWrapper(inp, encoding=encoding)
        res = json5.load(inp, encoding=encoding)
    elif fmt == 'toml':
        if not _is_utf8(encoding):
            raise AnyMarkupError('toml is always utf-8 encoded according to specification')
        if six.PY3:
            # python 3 toml prefers unicode objects
            inp = io.TextIOWrapper(inp, encoding=encoding)
        res = toml.load(inp)
    elif fmt == 'xml':
        res = xmltodict.parse(inp, encoding=encoding)
    elif fmt == 'yaml':
        # guesses encoding by its own, there seems to be no way to pass
        # it explicitly
        res = yaml.safe_load(inp)
    else:
        raise AnyMarkupError('unknown format: {0}'.format(fmt))

    # make sure it's all unicode and all int/float values were parsed correctly
    # the unicode part is here because of yaml on PY2 and also as workaround for
    # https://github.com/DiffSK/configobj/issues/18#issuecomment-76391689
    return _ensure_proper_types(res, encoding, force_types)
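# A minimal sketch of calling _do_parse directly, assuming the module-level
# helpers referenced above (_check_lib_installed, _ensure_proper_types) are
# available; the input bytes are made up.
sample = io.BytesIO(b'{"answer": 42, "ok": true}')
parsed = _do_parse(sample, 'json', 'utf-8', force_types=True)
# parsed == {'answer': 42, 'ok': True}, with all string values as unicode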
def load(self, path):
    """Populate the Config class with everything that sits inside
    the given JSON file path.

    path (string): the path of the JSON config file.

    """
    try:
        with io.open(path, 'rb') as fobj:
            data = json.load(fobj)

            # Put everything.
            for key, value in data.iteritems():
                setattr(self.args, key, value)
    except IOError:
        logger.exception("Error opening config file.")
    except ValueError:
        logger.exception("Config file is invalid JSON.")
    else:
        logger.info("Using config file %s.", path)
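# A minimal usage sketch, assuming a Config class exposing this method plus an
# `args` namespace object and a module-level `logger`, as the body above
# implies; the path and attribute name are hypothetical.
config = Config()
config.load('/path/to/config.json')
print(config.args.some_key)  # any top-level key from the JSON file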
import urllib

import json5
import pandas


def process(file_name, input, clients, output, distance_stops_to_stops, time_stops_to_stops,
            distance_stops_to_customers, time_stops_to_customer):
    # `apis` is expected to be a module-level dict mapping stringified integer
    # indices to Google Distance Matrix API keys.
    k = 1
    mode = {'0': 'driving', '1': 'walking'}
    for itera in range(2):
        # Select the latitud/longitud columns: stops-to-stops on the first pass,
        # stops-to-customers on the second.
        if itera == 0:
            dataframe = pandas.read_csv(file_name)
            sub = dataframe[['latitud', 'longitud']]
            dataframe2 = pandas.read_csv(file_name)
            sub2 = dataframe2[['latitud', 'longitud']]
        else:
            dataframe = pandas.read_csv(file_name)
            dataframe = dataframe[dataframe.index > 0]
            dataframe = dataframe.reset_index()
            sub = dataframe[['latitud', 'longitud']]
            dataframe2 = pandas.read_csv(clients)
            sub2 = dataframe2[['latitud', 'longitud']]

        dffinald = []
        dffinalt = []
        l = 0
        p = 0
        for i in range(len(sub.latitud)):
            # Add a row for this origin
            dffinald.append([])
            dffinalt.append([])
            for j in range(len(sub2.latitud)):
                orig_coord = (sub.latitud[i], sub.longitud[i])
                dest_coord = (sub2.latitud[j], sub2.longitud[j])
                url = ('https://maps.googleapis.com/maps/api/distancematrix/json'
                       '?origins={0}&destinations={1}&mode={2}&sensor=false&key={3}'.format(
                           str(orig_coord), str(dest_coord), str(mode[str(itera)]), str(apis[str(k)])))
                try:
                    result = json5.load(urllib.urlopen(url))
                    tript = result['rows'][0]['elements'][0]['duration']['value']
                    tripd = result['rows'][0]['elements'][0]['distance']['value']
                    dffinald[i].append(tripd)
                    dffinalt[i].append(tript)
                except AttributeError:
                    # The current API key stopped working: switch to the next key and retry.
                    k = k + 1
                    url = ('https://maps.googleapis.com/maps/api/distancematrix/json'
                           '?origins={0}&destinations={1}&mode={2}&sensor=false&key={3}'.format(
                               str(orig_coord), str(dest_coord), str(mode[str(itera)]), str(apis[str(k)])))
                    result = json5.load(urllib.urlopen(url))
                    tript = result['rows'][0]['elements'][0]['duration']['value']
                    tripd = result['rows'][0]['elements'][0]['distance']['value']
                    dffinald[i].append(tripd)
                    dffinalt[i].append(tript)
                except (IOError, IndexError):
                    # Transient network error or empty response: retry with the same key.
                    result = json5.load(urllib.urlopen(url))
                    tript = result['rows'][0]['elements'][0]['duration']['value']
                    tripd = result['rows'][0]['elements'][0]['distance']['value']
                    dffinald[i].append(tripd)
                    dffinalt[i].append(tript)
                l = l + 1
                p = p + 1
                if l == 2400:
                    # Rotate to the next API key after 2400 requests.
                    l = 0
                    k = k + 1
        if itera == 0:
            pandas.DataFrame(dffinald).to_csv(distance_stops_to_stops, index=False, header=None)
            pandas.DataFrame(dffinalt).to_csv(time_stops_to_stops, index=False, header=None)
        else:
            pandas.DataFrame(dffinald).to_csv(distance_stops_to_customers, index=False, header=None)
            pandas.DataFrame(dffinalt).to_csv(time_stops_to_customer, index=False, header=None)
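# A minimal usage sketch for process(), assuming a module-level `apis` dict of
# Google Distance Matrix API keys indexed by stringified integers, as the code
# above implies; every file name and key below is hypothetical. Note that the
# `input` and `output` arguments are not used by the function body.
apis = {'1': 'YOUR_API_KEY_1', '2': 'YOUR_API_KEY_2'}
process('stops.csv', 'stops.csv', 'clients.csv', None,
        'distance_stops_to_stops.csv', 'time_stops_to_stops.csv',
        'distance_stops_to_customers.csv', 'time_stops_to_customers.csv')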