def get_config():
    """Locate (and, if necessary, create) the ~/.ofpy_config file and return
    it parsed into a SafeConfigParser.

    Exits the process (status 0) after writing a fresh template so the user
    can edit it before re-running.
    """
    config = SafeConfigParser()
    try:
        # NOTE(review): `custom_config_dir` is read before any assignment in
        # this scope, so this branch always raises UnboundLocalError and the
        # home-directory fallback below is what actually runs.  Presumably a
        # module-level setting was intended here -- confirm.
        custom_config_dir = os.path.expanduser(custom_config_dir)
        config_path = os.path.join(custom_config_dir, '.ofpy_config')
    except UnboundLocalError:
        home_dir = os.path.expanduser('~')
        config_path = os.path.join(home_dir, '.ofpy_config')
    if not os.path.exists(config_path):
        try:
            make_config(config_path)
        except Exception as e:
            logger.exception("It doesn't look like you have a config file, but"
                             " I wasn't able to create the template for you.")
        else:
            msg = ('Config file did not exist, so I created a template'
                   ' at ~/.ofpy_config. Please edit with your config.')
            logger.warning(msg)
        # Either way there is no usable config yet; stop here.
        sys.exit(0)
    logger.debug("Parsing config file {}".format(config_path))
    with open(config_path) as r:
        try:
            # Python 3 API; older parsers only provide readfp().
            config.read_file(r)
        except AttributeError:
            config.readfp(r)
    return config
def loadConfig(cfg_file):
    """Parse *cfg_file* and return the resulting configparser object.

    Args:
        cfg_file: path to the configuration file.

    Raises:
        OSError: if *cfg_file* does not exist.
    """
    if not os.path.exists(cfg_file):
        # Bug fix: the message hard-coded "slappd.cfg" regardless of the
        # file actually requested; report the real path.
        raise OSError('ERROR: Configuration file %s does not exist' % cfg_file)
    cfg = SafeConfigParser()
    cfg.read(cfg_file)
    return cfg
def add_project_hook(project_name):
    '''This function is used to customise the paths to data files when
    datasets are imported. When project_name is "AWN" /etc/awnet.ini defines
    the magnetometer site name then remap the URLs for the site to local
    filenames, eg for use on the AuroraWatchNet magnetometer data loggers.
    '''
    if project_name == 'AWN':
        filename = '/etc/awnet.ini'
        if not os.path.exists(filename):
            # Not running on a logger host; leave the remote URLs untouched.
            return
        try:
            config = SafeConfigParser()
            config.read(filename)
            site = config.get('magnetometer', 'site').upper()
        except Exception as e:
            # Best-effort: a malformed ini just disables the remapping.
            print('Bad config file ' + filename + ': ' + str(e))
            return
        if site not in ap.projects[project_name]:
            return  # Unknown site
        # For data which matches this host's site convert all URLS to
        # local paths.
        make_data_local = lambda path, project, site, data_type, archive: \
            path.replace('http://aurorawatch.lancs.ac.uk/data/', '/data/')
        # Convert all URLS to local paths
        ap.tools.change_load_data_paths(project_name, make_data_local)
def from_config(cls, cfg, prefix=PREFIX, section='DEFAULT'):
    '''Build a query instance from configuration file settings.

    @type cfg: basestring / ConfigParser derived type
    @param cfg: configuration file path or ConfigParser type object
    @type prefix: basestring
    @param prefix: prefix for option names e.g. "attributeQuery."
    @type section: basestring
    @param section: configuration file section from which to extract
    parameters.
    '''
    if isinstance(cfg, ConfigParser):
        # Caller already parsed the file for us.
        parsed = cfg
    elif isinstance(cfg, str):
        cfg_file_path = os.path.expandvars(cfg)
        parsed = SafeConfigParser(
            defaults=dict(here=os.path.dirname(cfg_file_path)))
        parsed.optionxform = str
        parsed.read(cfg_file_path)
    else:
        raise AttributeError('Expecting basestring or ConfigParser type '
                             'for "cfg" attribute; got %r type' % type(cfg))

    # Hand the section's options to from_kw as keyword arguments.
    keywords = dict(parsed.items(section))
    if prefix and 'prefix' not in keywords:
        keywords['prefix'] = prefix
    return cls.from_kw(**keywords)
class GitConfigParser():
    """Per-branch view of the gitcc configuration file (.git/gitcc)."""

    CORE = 'core'

    def __init__(self, branch):
        self.section = branch
        self.file = join(GIT_DIR, '.git', 'gitcc')
        self.parser = SafeConfigParser()
        self.parser.add_section(self.section)

    def set(self, name, value):
        """Set *name* in this branch's section (in memory only)."""
        self.parser.set(self.section, name, value)

    def read(self):
        """(Re)load settings from the config file."""
        self.parser.read(self.file)

    def write(self):
        """Persist settings to the config file.

        Bug fix: the file handle was never closed; use a context manager.
        """
        with open(self.file, 'w') as f:
            self.parser.write(f)

    def getCore(self, name, *args):
        """Look up *name* in the shared [core] section."""
        return self._get(self.CORE, name, *args)

    def get(self, name, *args):
        """Look up *name* in this branch's section."""
        return self._get(self.section, name, *args)

    def _get(self, section, name, default=None):
        # Missing options fall back to *default* instead of raising.
        if not self.parser.has_option(section, name):
            return default
        return self.parser.get(section, name)

    def getList(self, name, default=None):
        """Return a '|'-separated option as a list."""
        return self.get(name, default).split('|')

    def getInclude(self):
        return self.getCore('include', '.').split('|')

    def getBranches(self):
        return self.getList('branches', 'main')

    def getExtraBranches(self):
        return self.getList('_branches', 'main')
def main():
    """Entry point for the Qt model-viewer GUI: load config.cfg, set up
    logging and the dat/gen managers, then run the Qt event loop."""
    config = SafeConfigParser()
    # NOTE(review): readfp() is deprecated (read_file() replaces it) and this
    # handle is never closed -- consider a `with` block.
    config.readfp(open('config.cfg'))

    set_logging(config.get('logs', 'path'), 'xivdmgui')

    dat_manager = DatManager(config.get('game', 'path'))
    gen_manager = GenManager(dat_manager)

    app = QtGui.QApplication(sys.argv)

    main_window = QtGui.QMainWindow()
    main_window.setWindowTitle('Model Viewer')

    gl_widget = OpenGLWidget(dat_manager)
    # Model list on the left, GL preview on the right.
    list_view = ListWidget(
        sorted(walk_dict(gen_manager.get_category('models'))), gl_widget)

    splitter = QtGui.QSplitter()
    splitter.addWidget(list_view)
    splitter.addWidget(gl_widget)

    main_window.setCentralWidget(splitter)
    main_window.resize(640, 480)
    main_window.show()

    # exec_() blocks until the window closes; propagate its exit status.
    sys.exit(app.exec_())
def main():
    """Draw 100 cards from the markov AI and print a sorted histogram of
    the string results."""
    parser = SafeConfigParser()
    parser.read('settings.ini')
    connection = Connection(parser.get('mongodb', 'server'))
    # Bug fix: exec('db = connection.' + name) cannot rebind a function-local
    # variable in Python 3, so `db` silently stayed None.  getattr() does the
    # dynamic attribute access correctly (and avoids executing config-file
    # content as code).
    db = getattr(connection, parser.get('mongodb', 'db'))
    ai = mc(db, "markov", exp=1.2)
    deck = []
    for i in range(100):
        c = ai.get()
        if isinstance(c, str):  # keep only string draws
            deck.append(c)
    deck.sort()
    # Count occurrences per distinct card.
    deck_clean = bag(lambda: 0)
    for c in deck:
        deck_clean[c] += 1
    for c in set(deck):
        print("%2i %s" % (deck_clean[c], c))
def ImportChanges():
    """Re-read each race's edited hotkey file and merge the values back
    into the seed file via SaveSeedFile()."""
    parsers = {}
    for r in races:
        hotkeyfile_parser = SafeConfigParser()
        hotkeyfile_parser.optionxform = str  # keys are case sensitive
        hotkeyfile_parser.read(prefix + " " + r + "LM " + suffix)
        parsers[r] = hotkeyfile_parser
    # The Hotkeys and Commands sections are merged identically; the
    # duplicated loop bodies are factored into one helper.
    hotkeys = _collect_section(parsers, "Hotkeys")
    commands = _collect_section(parsers, "Commands")
    SaveSeedFile(hotkeys, commands)


def _collect_section(parsers, section):
    """Build the hotkey objects for *section*, overlaying each race's value
    (falling back to the seed default when a race has no override)."""
    result = []
    for item_pair in seed_parser.items(section):
        hotkey = get_hotkey(item_pair, section)
        for r in races:
            value = hotkey.default
            if parsers[r].has_option(section, hotkey.name):
                value = parsers[r].get(section, hotkey.name)
            setattr(hotkey, r, value)
        result.append(hotkey)
    return result
def loadConfig(cls):
    """Load CONFIG_FILE into cls.config (options) and cls.series.

    A missing file is tolerated: defaults are kept and no series are loaded.
    """
    config = SafeConfigParser()
    if os.path.isfile(CONFIG_FILE):
        # Bug fix: the codecs handle was never closed; use a context manager.
        with codecs.open(CONFIG_FILE, encoding='utf-8') as fh:
            config.read_file(fh)
    # The default config
    cls.config = {}
    cls.config['command_open'] = None
    cls.config['player'] = 1
    cls.config['debug'] = 0
    # Load the options
    if config.has_section('options'):
        for key, value in config.items('options'):
            cls.config[key] = value
    # Load the series: every non-options section describes one show.
    cls.series = []
    for section in config.sections():
        if section != 'options':
            title = config.get(section, 'title')
            videos = config.get(section, 'videos')
            theTvDb = config.getint(section, 'theTvDb')
            lang = config.get(section, 'lang')
            cls.series.append([section, title, videos, theTvDb, lang])
def get_credentials():
    '''
    Get a SafeConfigParser instance with FACT credentials

    On the first call, you will be prompted for the FACT password

    The following credentials are stored:
    - telegram
        - token
    - database
        - user
        - password
        - host
        - database
    - twilio
        - sid
        - auth_token
        - number

    use get_credentials().get(group, element) to retrieve elements
    '''
    with resource_stream('fact_credentials', 'credentials.encrypted') as f:
        print('Please enter the current, universal FACT password')
        passwd = getpass()
        decrypted = decrypt(passwd, f.read()).decode('utf-8')
    config = SafeConfigParser()
    # read_string() replaces the deprecated readfp(StringIO(...)) combination.
    config.read_string(decrypted)
    return config
def __init__(self): # deal with configuration file # configparser.read(default.cfg) cfg = SafeConfigParser() cfg.read("default.cfg") data = cfg.get("Paths", "data") orthography_profile = cfg.get("Paths", "orthography_profile") # set variables, e.g. source, orthography parser, etc. self.data = open(data, "r") self.o = OrthographyParser(orthography_profile) # self.o = GraphemeParser() self._languages = collections.defaultdict(int) # given unique ID to each unique language name self._concepts = collections.defaultdict(int) # ... self._counterparts = collections.defaultdict(int) # .. self._wordlist_iterator = self._process_input(self.data) # print(type(self.iterator)) # print(len(self.counterparts)) # words = self.get_qlc_tokenized_words() """ count = 0 for line in words: if line != "": print(line) count += 1 print(count) """ """
def parseConfig(self, cfg, prefix='', section='DEFAULT'):
    '''Read config file settings

    :type cfg: basestring / ConfigParser derived type
    :param cfg: configuration file path or ConfigParser type object
    :type prefix: basestring
    :param prefix: prefix for option names e.g. "attributeQuery."
    :type section: basestring
    :param section: configuration file section from which to extract
    parameters.
    '''
    if isinstance(cfg, ConfigParser):
        # Caller supplied an already-parsed configuration.
        parsed = cfg
    elif isinstance(cfg, str):
        file_path = path.expandvars(cfg)
        parsed = SafeConfigParser(defaults=dict(here=path.dirname(file_path)))
        parsed.optionxform = str
        parsed.read(file_path)
    else:
        raise AttributeError('Expecting basestring or ConfigParser type '
                             'for "cfg" attribute; got %r type' % type(cfg))

    # Feed the section's options to parseKeywords as keyword arguments.
    settings = dict(parsed.items(section))
    if prefix and 'prefix' not in settings:
        settings['prefix'] = prefix
    self.parseKeywords(**settings)
def temporary_store_decorator(config_files_directory = default_config_files_directory, file_name = None):
    """Decorator factory: inject an open HDFStore as the `temporary_store`
    keyword argument of the wrapped function, closing it afterwards.

    The store lives in the `tmp_directory` configured in
    <config_files_directory>/config.ini, under <file_name>.h5.
    """
    parser = SafeConfigParser()
    config_ini = os.path.join(config_files_directory, 'config.ini')
    # read() returns the list of files it could parse (used in messages).
    read_config_file_name = parser.read([config_ini])
    tmp_directory = parser.get('data', 'tmp_directory')
    assert tmp_directory is not None, \
        'tmp_directory is not set: {!r} in {}'.format(tmp_directory, read_config_file_name)
    assert os.path.isabs(tmp_directory), \
        'tmp_directory should be an absolut path: {!r} in {}'.format(tmp_directory, read_config_file_name)
    if not os.path.isdir(tmp_directory):
        log.info('tmp_directory does not exist: {!r} in {}. Creating it.'.format(
            tmp_directory, read_config_file_name))
        os.makedirs(tmp_directory)
    assert file_name is not None
    if not file_name.endswith('.h5'):
        file_name = "{}.h5".format(file_name)
    file_path = os.path.join(tmp_directory, file_name)

    def actual_decorator(func):
        def func_wrapper(*args, **kwargs):
            # Open a fresh store per call; always close it, even on error.
            temporary_store = HDFStore(file_path)
            try:
                return func(*args, temporary_store = temporary_store, **kwargs)
            finally:
                gc.collect()
                temporary_store.close()
        return func_wrapper

    return actual_decorator
def loadJson(self, data):
    """Replace this configuration's contents with sections/values parsed
    from the JSON string *data*, then notify listeners.

    Expected shape: {section: {option: {"value": ...}, ...}, ...}.
    '%' characters are doubled so they survive ConfigParser interpolation.
    """
    from configparser import DuplicateSectionError
    newConf = SafeConfigParser()
    try:
        config = json.loads(data)
    except ValueError as e:
        raise e
    try:
        for kSec, vSec in config.items():
            try:
                newConf.add_section(kSec)
            # Bug fix: add_section() raises DuplicateSectionError for an
            # existing section, not the NoSectionError caught before -- a
            # duplicate would have crashed out of the loop.
            except DuplicateSectionError:
                pass
            for kVal, vVal in vSec.items():
                try:
                    newConf.set(kSec, kVal, str(vVal['value']).replace('%', '%%'))
                except NoSectionError as e:
                    pass
    except Exception as e:
        # Best-effort import: report and keep whatever was built so far.
        print(e)
    # now replace: adopt the freshly-built parser's internal state wholesale.
    self._proxies = newConf._proxies
    self._sections = newConf._sections
    self.notify(FLSConfiguration.STATE_CHANGED)
def load_setup_connection_string(section):
    """
    Attempts to read the default connection string from the setup.cfg file.

    If the file does not exist or if it exists but does not contain the
    connection string, None is returned.  If the file exists but cannot be
    parsed, an exception is raised.
    """
    # Bug fixes: `abspath` was missing from the local import (NameError on
    # first call); SafeConfigParser was removed from configparser in Python
    # 3.12, so import ConfigParser (its long-standing alias) instead; the
    # bare `except:` is narrowed to Exception.
    from os.path import abspath, dirname, exists, join
    from configparser import ConfigParser

    FILENAME = "setup.cfg"
    KEY = "connection-string"

    path = join(dirname(dirname(abspath(__file__))), "tmp", FILENAME)
    if exists(path):
        try:
            p = ConfigParser()
            p.read(path)
        except Exception:
            raise SystemExit("Unable to parse %s: %s" % (path, sys.exc_info()[1]))
        if p.has_option(section, KEY):
            return p.get(section, KEY)
    # Implicit None when the file or the option is absent.
    return None
def load_library(filepath):
    """Load sensor label configuration file.

    Parameters
    ----------
    filepath : string

    Returns
    -------
    library : SafeConfigParser
        The parsed file; expected sections that are absent are reported on
        stdout but do not prevent the parser from being returned.
    """
    parser = SafeConfigParser()
    parser.read(filepath)
    sections = ['components', 'fluids', 'locations', 'measurement types']
    missing = set(sections) - set(parser.sections())
    if missing:
        # Bug fix: parser.section() was a typo (no such method) and raised
        # AttributeError whenever a section was missing.
        print('Found sections: ', sorted(parser.sections()))
        print('Missing sections: ', sorted(missing))
    return parser
def __init__(self, app_name, defaults=None, filename=None):
    """Load (or create) the per-application laditools configuration file.

    Parameters:
        app_name: application name; also the default basename of the file.
        defaults: optional dict of default values for app_name's section.
        filename: optional explicit configuration file name.
    """
    SafeConfigParser.__init__(self)
    # Bug fix: `defaults={}` was a shared mutable default argument.
    if defaults is None:
        defaults = {}
    # First setup default values for apps specified
    # by the call to the contructor
    self.set_config_section(app_name, defaults)
    self.config_dir = config_dir = path.join(basedir.xdg_config_home, 'laditools')
    if not filename:
        filename = app_name + '.conf'
    self.config_filename = config_filename = path.join(config_dir, filename)
    if not exists(config_dir):
        mkdir(config_dir, 0o755)
    try:
        self.read(config_filename)
    except MissingSectionHeaderError:
        # Old-style configuration file: migrate it when yaml is available.
        if yaml:
            if self._migrate_configuration() == 0:
                self.save()
        else:
            # new empty file
            pass
    # go on otherwise
    except Exception:
        # Bug fix: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; only real errors should become config errors.
        raise MalformedConfigError()
def loadconfig():
    """Populate the module-level ``config`` dict from battery.cfg, applying
    literal_eval to every value so numbers/lists/etc. come back typed."""
    parser = SafeConfigParser()
    parser.read('battery.cfg')
    for section in parser.sections():
        config[section] = {key: literal_eval(value)
                           for key, value in parser.items(section)}
def main():
    """Read DigiKey part numbers from DataIN/DigiKeyParts.csv and fetch
    pricing/availability data for each via retrieveData(); failures are
    reported per-part and do not stop the run."""
    # loading configuration file
    config = SafeConfigParser()
    # read() returns the list of files successfully parsed.
    if config.read('config.ini') != ['config.ini']:
        raise OSError('no config.ini file present')
    # loading CSV file
    try:
        pdf = pd.read_csv("DataIN/DigiKeyParts.csv")
        print("Loaded Part List")
    except OSError as e:
        print(e)
        exit()
    if len(pdf) == 0:
        print("No parts listed in CSV file")
        exit()
    # Output columns: fixed fields plus up to 9 price-break pairs.
    columns = ['Digi-Key Part Number', 'Manufacturer Part Number', 'Datasheet',
               'Quantity Available', 'Currency', 'Fee', 'Minimum Quantity']
    for i in range(1, 10):
        columns.append('Price Break' + str(i))
        columns.append('Price Break Q' + str(i))
    # looping over parts list, retrieving DigiKey Data
    for i in range(0, len(pdf)):
        # row layout (assumed): x[0] = DigiKey part number, x[1] = mfr part
        # number -- TODO confirm against the CSV schema.
        x = pdf.iloc[i].values
        print("attempting retrieval of: " + str(x[0]))
        try:
            retrieveData(x[0], processURL(x[0], x[1]), config, columns)
        except Exception as e:
            print("Part: " + str(x[0]) + " Retrieval Failed")
            print(e)
def loadGameSettings(self, _presetSuf):
    """Load the gameplay-rule preset named *_presetSuf* from
    settings/rules/<_presetSuf>.ini into self.setting.

    Percent-style multipliers from the file are converted to float factors;
    the ini section read is 'preset_<_presetSuf>'.
    """
    preset_parser = SafeConfigParser()
    # NOTE(review): the double .replace('main.exe','') is redundant (the
    # second call is a no-op) -- presumably left over from an edit; confirm.
    preset_parser.read(os.path.join(os.path.join(
        self.datadir.replace('main.exe', '').replace('main.exe', ''),
        'settings/rules',), _presetSuf + '.ini'))
    preset = 'preset_' + _presetSuf
    self.setting['current_preset'] = _presetSuf
    # Multipliers are stored as percentages; convert to factors.
    self.setting['gravity'] = float(getNumber(preset_parser, preset, 'gravityMultiplier')) / 100.0
    self.setting['weight'] = float(getNumber(preset_parser, preset, 'weightMultiplier')) / 100.0
    self.setting['friction'] = float(getNumber(preset_parser, preset, 'frictionMultiplier')) / 100.0
    self.setting['airControl'] = float(getNumber(preset_parser, preset, 'airControlMultiplier')) / 100.0
    self.setting['hitstun'] = float(getNumber(preset_parser, preset, 'hitstunMultiplier')) / 100.0
    self.setting['hitlag'] = float(getNumber(preset_parser, preset, 'hitlagMultiplier')) / 100.0
    self.setting['shieldStun'] = float(getNumber(preset_parser, preset, 'shieldStunMultiplier')) / 100.0
    self.setting['ledgeConflict'] = getString(preset_parser, preset, 'ledgeConflict')
    # Map the named sweet-spot size onto pixel dimensions.
    sweetSpotDict = {'large': [128, 128], 'medium': [64, 64], 'small': [32, 32]}
    self.setting['ledgeSweetspotSize'] = sweetSpotDict[getString(preset_parser, preset, 'ledgeSweetspotSize')]
    self.setting['ledgeSweetspotForwardOnly'] = getBoolean(preset_parser, preset, 'ledgeSweetspotForwardOnly')
    self.setting['teamLedgeConflict'] = getBoolean(preset_parser, preset, 'teamLedgeConflict')
    self.setting['ledgeInvincibilityTime'] = getNumber(preset_parser, preset, 'ledgeInvincibilityTime')
    self.setting['regrabInvincibility'] = getBoolean(preset_parser, preset, 'regrabInvincibility')
    self.setting['slowLedgeWakeupThreshold'] = getNumber(preset_parser, preset, 'slowLedgeWakeupThreshold')
    self.setting['airDodgeType'] = getString(preset_parser, preset, 'airDodgeType')
    self.setting['freeDodgeSpecialFall'] = getBoolean(preset_parser, preset, 'freeDodgeSpecialFall')
    self.setting['enableWavedash'] = getBoolean(preset_parser, preset, 'enableWavedash')
    self.setting['airDodgeLag'] = int(getNumber(preset_parser, preset, 'airDodgeLag'))
    self.setting['lagCancel'] = getString(preset_parser, preset, 'lagCancel')
    print(self.setting)
def file_list(root):
    """Recursively build the list of configuration files reachable from
    *root*.

    The root file's [config] section may name extra search directories
    ("path") and file names ("files"); each file found is expanded the same
    way.  Returns a de-duplicated list headed by *root* itself.
    """
    # read the root file, get its [config] section
    # and use it to construct the file list.
    conf = SafeConfigParser()
    conf.read(root)
    try:
        dirlist = conf.get("config", "path").replace(' ', '').split(',')
    except Exception:
        # Bug fix: was a bare `except:`; a missing section/option simply
        # means no extra directories.
        dirlist = []
    try:
        files = conf.get("config", "files").replace(' ', '').split(',')
    except Exception:
        files = []
    root = os.path.abspath(root)
    # all relative pathnames will be relative to the rootdir
    rootdir = os.path.dirname(root)
    flist = [root]
    dirlist = [os.path.abspath("%s/%s" % (rootdir, x))
               if not os.path.isabs(x) else os.path.abspath(x)
               for x in dirlist]
    # insert the directory of the root file at the beginning
    dirlist.insert(0, rootdir)
    for d in dirlist:
        for f in files:
            fnm = "%s/%s" % (d, f)
            if fnm in flist:
                continue
            if os.access(fnm, os.F_OK):
                # Recurse into each discovered file.
                flist += file_list(fnm)
    return uniq(flist)
def import_config(self, config_name, sections, listnames=None, intnames=None):
    """Imports the parameters of a section specified in the configuration
    file as class attributes.

    Args:
        config_name: path of the ini file to read.
        sections: section name or list of section names to import.
        listnames: attribute name(s) to convert with ast.literal_eval.
        intnames: attribute name(s) to convert to int.

    Raises:
        ValueError: missing file, section, or converted parameter.
    """
    # Bug fixes: the [] defaults were shared mutable default arguments, and
    # only sections/intnames accepted a bare string -- listnames now gets
    # the same normalisation for consistency.
    if isinstance(sections, str):
        sections = [sections]
    if isinstance(intnames, str):
        intnames = [intnames]
    if isinstance(listnames, str):
        listnames = [listnames]
    if intnames is None:
        intnames = []
    if listnames is None:
        listnames = []
    parser = SafeConfigParser()
    parser.optionxform = str  # make option names case sensitive
    found = parser.read(config_name)
    if not found:
        raise ValueError('No config file found!')
    for name in sections:
        if not name in parser.sections():
            raise ValueError('Section {sec} not found in {config}'.format(sec=name, config=config_name))
        self.__dict__.update(parser.items(name))
    # Convert strings to integer where appropriate
    for name in intnames:
        if not (hasattr(self, name)):
            raise ValueError('Parameter {par} not found in {config}'.format(par=name, config=config_name))
        setattr(self, name, int(getattr(self, name)))
    # Convert strings to Python literals (lists etc.) where appropriate
    for name in listnames:
        if not (hasattr(self, name)):
            raise ValueError('Parameter {par} not found in {config}'.format(par=name, config=config_name))
        setattr(self, name, ast.literal_eval(getattr(self, name)))
def readPreferences(self):
    """Reads in the preference file.

    This method will throw exceptions when errors occur:

    - FileReadException: unable to parse the configuraiton file
    - SectionNotFoundException: unable to locate a required section
    - OptionNotFoundException: unable to locate a required option
    - UnkownOptionException: unkown option encountered
    - InvalidOptionException: option has wrong value

    For the configuration file format look at the bottom of this file.
    """
    parser = SafeConfigParser()
    # read() returns the list of files successfully parsed; anything other
    # than exactly one means self._file was missing or unreadable.
    if len(parser.read([self._file])) != 1:
        raise FileReadException(self._file)
    try:
        # source
        sn = 'source'
        kvp = parser.items(sn)
        self.__parseSourceSection(sn, kvp)
    except (configparser.NoSectionError):
        raise SectionNotFoundException(self._file, sn)
class ConfigParsing():
    """Parser for the fail2ban-cluster configuration file."""

    def __init__(self, configfile='fail2ban-cluster.conf'):
        self.validsections = ['general', 'monitor', 'subscriber', 'publisher']
        self.parser = SafeConfigParser()
        # Bug fix: ConfigParser.read() silently ignores missing files and
        # never raises, so the original try/except could not fire.  Check
        # the list of successfully-parsed files instead.
        if not self.parser.read(configfile):
            raise ValueError('configuration file does not exist')

    def Section(self, section=None):
        """Return the options of *section* as a dict.

        Raises:
            ValueError: if no section is given or it is not a known section.
        """
        if section is None:
            raise ValueError('no section specified')
        result = {}
        if not section in self.validsections:
            raise ValueError('requested section is invalid or inexistant')
        for option in self.parser.options(section):
            try:
                result[option] = self.parser.get(section, option)
            except Exception:
                # TODO: add stderr error, must check daemon.py
                result[option] = None
        return result
def to_ical(self, tasks):
    """Convert Garoon task dicts into an iCalendar byte string.

    Each task dict must carry 'detail', 'description', 'id', 'all_day',
    'start' and 'end'; 'rrule', 'exdate' and 'location' are optional.
    """
    from icalendar import Calendar, Event
    ical = Calendar()
    ical.add('version', '2.0')
    ical.add('prodid', '-//test//hoge//EN')
    for task in tasks:
        event = Event()
        event.add('summary', task['detail'])
        event.add('description', task['description'])
        # NOTE(review): grn2ical.conf is re-read once per task -- could be
        # hoisted out of the loop.
        config = SafeConfigParser()
        config.read('grn2ical.conf')
        grn_url = config.get('garoon', 'view_url')
        event.add('url', '%s/schedule/view?event=%s' % (grn_url, task['id']))
        if task['all_day'] == True:
            # All-day events use date-only boundaries.
            event.add('dtstart', dateutil.parser.parse(task['start']).date())
            event.add('dtend', dateutil.parser.parse(task['end']).date())
        else:
            event.add('dtstart', dateutil.parser.parse(task['start']))
            event.add('dtend', dateutil.parser.parse(task['end']))
        if 'rrule' in task.keys():
            event.add('rrule', task['rrule'])
        if 'exdate' in task.keys():
            for exdate in task['exdate']:
                event.add('exdate', dateutil.parser.parse(exdate))
        if 'location' in task.keys():
            event.add('location', task['location'])
        ical.add_component(event)
    return ical.to_ical()
def update_sd_units(container, service, *args):
    """Write the systemd .mount unit for *container*'s OverlayFS and wire it
    into *service*'s [Unit] Requires/After lists.

    No-op for containers without dependencies (no overlayfs needed).
    """
    if len(container.dependencies) == 0:
        return  # This container doesn't use overlayfs
    name = container.name
    mount_name = escape_path(name)
    # Prepare the overlayfs mount unit.  ConfigParser doubles as a writer
    # for systemd's ini-style unit files; optionxform=str keeps key case.
    mount = SafeConfigParser()
    mount.optionxform = str
    mount['Unit'] = {
        'Description': 'OverlayFS for {}'.format(name),
        'PartOf': '{}.service'.format(mount_name)
    }
    # assumes container.mounts()[0] is the overlayfs mount dict with a
    # 'Where' key -- TODO confirm
    mount['Mount'] = container.mounts()[0]
    # systemd-escape produces the unit-file name matching the mount path.
    mount_path = subprocess.check_output(
        ['systemd-escape', '--path', mount['Mount']['Where']]).strip().decode('utf-8')
    dst_filename = "/etc/systemd/system/{}.mount".format(mount_path)
    with open(dst_filename, 'w') as f:
        mount.write(f)
    # Make the service depend on (and order after) the mount unit.
    unit = service['Unit']
    if 'Requires' not in unit:
        unit['Requires'] = ''
    if 'After' not in unit:
        unit['After'] = ''
    unit['Requires'] += 'var-lib-machines-{}-fs.mount '.format(mount_name)
    unit['After'] += 'var-lib-machines-{}-fs.mount '.format(mount_name)
class PluginManager(object):
    def __init__(self):
        """
        Initialize the PluginManager including:

        - plugin configuration directory
        - plugin search locations
        """
        self.config_path = save_config_path("yasibo")
        self.config_file = os.path.join(self.config_path, "plugins.conf")
        # Search locations: XDG data dirs, then the in-tree dev location.
        # (Fix: the original abused a list comprehension for its side
        # effects; build the list directly instead.)
        places = [os.path.join(path, "yasibo", "plugins")
                  for path in xdg_data_dirs]
        # dev location
        places.append("%s/../plugins" % os.path.dirname(os.path.abspath(__file__)))
        PluginManagerSingleton.setBehaviour([ConfigurablePluginManager,
                                             VersionedPluginManager])
        self.manager = PluginManagerSingleton.get()
        locator = self.manager.getPluginLocator()
        locator.setPluginInfoExtension("yasibo-plugin")
        self.manager.setPluginPlaces(places)
        self.config = SafeConfigParser()
        self.config.read(self.config_file)
        self.manager.setConfigParser(self.config, self.save)
        self.manager.collectPlugins()
        log.debug("Config file: %s" % self.config_file)

    def save(self):
        """
        Saves the plugin configuration to file.
        """
        # Bug fix: use a context manager so the handle is closed even if
        # config.write() raises.
        with open(self.config_file, "w") as f:
            self.config.write(f)
def getConfPart(name, section='main'): parser = SafeConfigParser() # Get absolute dir for config file configLocation = __file__.replace("app/helpers/getConfig.py","config.ini") parser.read(configLocation) return parser.get(section, name)
def load_setup_connection_string(section):
    """
    Attempts to read the default connection string from the setup.cfg file.

    Walks up from this file's directory looking for tmp/setup.cfg.  If no
    such file exists, or it exists but does not contain the connection
    string, None is returned.  If the file exists but cannot be parsed, an
    exception is raised.
    """
    # Bug fixes: `abspath` was missing from the local import (NameError);
    # SafeConfigParser was removed from configparser in Python 3.12, so use
    # ConfigParser (its long-standing alias); the bare `except:` is narrowed;
    # the parse-error message now names the file (fqn), not the directory.
    from os.path import abspath, dirname, exists, join
    from configparser import ConfigParser

    FILENAME = 'setup.cfg'
    KEY = 'connection-string'

    path = dirname(abspath(__file__))
    while True:
        fqn = join(path, 'tmp', FILENAME)
        if exists(fqn):
            break
        parent = dirname(path)
        print('{} --> {}'.format(path, parent))
        if parent == path:
            # Reached the filesystem root without finding the file.
            return None
        path = parent

    try:
        p = ConfigParser()
        p.read(fqn)
    except Exception:
        raise SystemExit('Unable to parse %s: %s' % (fqn, sys.exc_info()[1]))

    if p.has_option(section, KEY):
        return p.get(section, KEY)
def main(ctx):
    """Split ctx.DIR/hist.csv into one .log file per entry of the
    module-level `columns` list, then emit results.html from the `html`
    template with the detected chart type.
    """
    # NOTE(review): these handles are never closed/flushed explicitly --
    # consider an ExitStack.
    out_files = [open(join(ctx.DIR, "%s.log" % c), 'w') for c in columns]
    for i in range(len(columns)):
        out_files[i].write("#LABEL:%s\n" % columns[i])
    with open(join(ctx.DIR, 'hist.csv'), 'r') as csv:
        csv.readline()  # skip the header row
        for line in csv:
            # Column 0 is the x value; column i+1 feeds columns[i]'s log.
            vs = line.split(', ')
            for i in range(len(columns)):
                out_files[i].write("%d %s\n" % (int(vs[0]), vs[i + 1].rstrip()))
    # Time-series charts are used when any job section logs unix epochs.
    chart_type = "xy"
    cp = SafeConfigParser(allow_no_value=True)
    cp.read(ctx.job_file)
    for s in cp.sections():
        try:
            epoch = cp.get(s, 'log_unix_epoch')
            chart_type = "timeseries"
        except:
            pass
    print("Chart Type: %s" % chart_type)
    with open(join(ctx.DIR, 'results.html'), 'w') as fp:
        fp.write(html % (chart_type,))
def write_config(dictionary, out=None):
    """Write *dictionary* as INI-formatted text to *out*.

    Args:
        dictionary: mapping of section name -> {option: value}.
        out: writable text stream.  Defaults to the *current* sys.stdout --
            resolved at call time (fix: the old `out=sys.stdout` default was
            bound at definition time and ignored later redirection).
    """
    if out is None:
        out = sys.stdout
    cp = SafeConfigParser(allow_no_value=True)
    cp.read_dict(dictionary)
    cp.write(out)
tools = os.path.join(os.environ['SUMO_HOME'], 'tools') sys.path.append(tools) else: sys.exit("Please declare the environment variable 'SUMO_HOME'") import traci from sumo_env import SumoEnvironment from agent import Agent from epsilon_greedy import EpsilonGreedy from configparser import SafeConfigParser from util import save_csv, plot if __name__ == '__main__': # load default config rl_params = SafeConfigParser() rl_params.read('rl.ini') simulation_step = int(rl_params.get('DEFAULT', 'num_simulations')) # define output csv file experiment_time = str(datetime.now()).split('.')[0] out_csv = 'outputs/{}_{}_{}Agent'.format( experiment_time, rl_params.get('DEFAULT', 'signal'), rl_params.get('DEFAULT', 'rl_agent')) # init sumo environment signal_type = rl_params.get('DEFAULT', 'signal') #Get the signal phases for the traffic network if signal_type == 'one_way': signal_phase = [
from QGIS_FMV.geo import sphere
from QGIS_FMV.klvdata.element import UnknownElement
from QGIS_FMV.klvdata.streamparser import StreamParser
from QGIS_FMV.utils.QgsFmvLayers import (addLayerNoCrsDialog, ExpandLayer,
                                         UpdateFootPrintData, UpdateTrajectoryData,
                                         UpdateBeamsData, UpdatePlatformData,
                                         UpdateFrameCenterData, UpdateFrameAxisData,
                                         SetcrtSensorSrc, SetcrtPltTailNum)
from QGIS_FMV.utils.QgsUtils import QgsUtils as qgsu

# Module-level configuration: settings.ini lives two directories above this
# file (the plugin root).
parser = SafeConfigParser()
parser.read(os.path.join(dirname(dirname(abspath(__file__))), 'settings.ini'))

# Layer names and general options shared across the module.
frames_g = parser['LAYERS']['frames_g']
Reverse_geocoding_url = parser['GENERAL']['Reverse_geocoding_url']
min_buffer_size = int(parser['GENERAL']['min_buffer_size'])
Platform_lyr = parser['LAYERS']['Platform_lyr']
Footprint_lyr = parser['LAYERS']['Footprint_lyr']
FrameCenter_lyr = parser['LAYERS']['FrameCenter_lyr']
dtm_buffer = int(parser['GENERAL']['DTM_buffer_size'])
ffmpegConf = parser['GENERAL']['ffmpeg']

try:
    from homography import from_points
except ImportError:
    # Optional dependency: homography-based features are simply unavailable.
    None
def __init__(self, driver_conf):
    """Set up the CARLA driver wrapper from *driver_conf*.

    Caches behaviour flags, initialises empty actor/sensor bookkeeping,
    reads the G29 steering-wheel axis mapping from wheel_config.ini when
    the wheel flag is on, and finally loads the configured city map.
    """
    Driver.__init__(self)
    # some behaviors
    self._autopilot = driver_conf.autopilot
    # those reset period are in the actual system time, not in simulation time
    self._reset_period = driver_conf.reset_period
    self._goal_reaching_threshold = 3
    self.use_planner = driver_conf.use_planner
    # we need the planner to find a valid episode, so we initialize one no
    # matter what
    self._world = None
    self._vehicle = None
    self._agent_autopilot = None
    self._camera_center = None
    self._spectator = None
    # (last) images store for several cameras
    self._data_buffers = dict()
    self.update_once = False
    self._collision_events = []
    self.collision_sensor = None
    if __CARLA_VERSION__ == '0.8.X':
        self.planner = Planner(driver_conf.city_name)
    else:
        # Newer CARLA versions: no offline planner is available here.
        self.planner = None
        self.use_planner = False
    # resources
    self._host = driver_conf.host
    self._port = driver_conf.port
    # various config files
    self._driver_conf = driver_conf
    self._config_path = driver_conf.carla_config
    # some initializations
    self._straight_button = False
    self._left_button = False
    self._right_button = False
    self._rear = False
    self._recording = False
    self._skiped_frames = 20
    self._stucked_counter = 0
    self._prev_time = datetime.now()
    self._episode_t0 = datetime.now()
    # NOTE(review): this sets attributes on the namedtuple *class*, using it
    # as a mutable record for stuck detection -- works, but unconventional.
    self._vehicle_prev_location = namedtuple("vehicle", "x y z")
    self._vehicle_prev_location.x = 0.0
    self._vehicle_prev_location.y = 0.0
    self._vehicle_prev_location.z = 0.0
    self._camera_left = None
    self._camera_right = None
    self._camera_center = None
    self._actor_list = []
    self._sensor_list = []
    self._weather_list = [
        'ClearNoon', 'CloudyNoon', 'WetNoon', 'WetCloudyNoon',
        'MidRainyNoon', 'HardRainNoon', 'SoftRainNoon', 'ClearSunset',
        'CloudySunset', 'WetSunset', 'WetCloudySunset', 'MidRainSunset',
        'HardRainSunset', 'SoftRainSunset'
    ]
    self._current_weather = 4
    self._current_command = 2.0
    # steering wheel
    self._steering_wheel_flag = True
    if self._steering_wheel_flag:
        self._is_on_reverse = False
        self._control = VehicleControl()
        # Axis indices for the G29 wheel come from wheel_config.ini.
        self._parser = SafeConfigParser()
        self._parser.read('wheel_config.ini')
        self._steer_idx = int(
            self._parser.get('G29 Racing Wheel', 'steering_wheel'))
        self._throttle_idx = int(
            self._parser.get('G29 Racing Wheel', 'throttle'))
        self._brake_idx = int(self._parser.get('G29 Racing Wheel', 'brake'))
        self._reverse_idx = int(
            self._parser.get('G29 Racing Wheel', 'reverse'))
        self._handbrake_idx = int(
            self._parser.get('G29 Racing Wheel', 'handbrake'))
    # Stand-in timestamp object until the first real simulator tick arrives.
    self.last_timestamp = lambda x: x
    self.last_timestamp.elapsed_seconds = 0.0
    self.last_timestamp.delta_seconds = 0.2
    self.initialize_map(driver_conf.city_name)
class CarlaHuman(Driver):
    """Human/autopilot driver for a CARLA simulator vehicle.

    Supports both the 0.8.X client API and the newer (0.9.X-style) world
    API, switching on the module-level ``__CARLA_VERSION__``. Handles
    episode reset, traffic/parked-vehicle spawning, RGB camera sensors,
    collision bookkeeping, and joystick/steering-wheel input.
    """

    def __init__(self, driver_conf):
        """Initialize driver state from a driver configuration object."""
        Driver.__init__(self)
        # some behaviors
        self._autopilot = driver_conf.autopilot
        # those reset period are in the actual system time, not in simulation time
        self._reset_period = driver_conf.reset_period
        self._goal_reaching_threshold = 3
        self.use_planner = driver_conf.use_planner
        # we need the planner to find a valid episode, so we initialize one no matter what
        self._world = None
        self._vehicle = None
        self._agent_autopilot = None
        self._camera_center = None
        self._spectator = None
        # (last) images store for several cameras
        self._data_buffers = dict()
        self.update_once = False
        self._collision_events = []
        self.collision_sensor = None
        # Planner only exists for the 0.8.X API; disabled otherwise.
        if __CARLA_VERSION__ == '0.8.X':
            self.planner = Planner(driver_conf.city_name)
        else:
            self.planner = None
            self.use_planner = False
        # resources
        self._host = driver_conf.host
        self._port = driver_conf.port
        # various config files
        self._driver_conf = driver_conf
        self._config_path = driver_conf.carla_config
        # some initializations
        self._straight_button = False
        self._left_button = False
        self._right_button = False
        self._rear = False
        self._recording = False
        self._skiped_frames = 20
        self._stucked_counter = 0
        self._prev_time = datetime.now()
        self._episode_t0 = datetime.now()
        # NOTE(review): binds the namedtuple *class* and mutates class
        # attributes — works as a record, but unconventional; confirm
        # before refactoring.
        self._vehicle_prev_location = namedtuple("vehicle", "x y z")
        self._vehicle_prev_location.x = 0.0
        self._vehicle_prev_location.y = 0.0
        self._vehicle_prev_location.z = 0.0
        self._camera_left = None
        self._camera_right = None
        self._camera_center = None
        self._actor_list = []
        self._sensor_list = []
        # Weather preset names understood by carla.WeatherParameters.
        self._weather_list = [
            'ClearNoon', 'CloudyNoon', 'WetNoon', 'WetCloudyNoon',
            'MidRainyNoon', 'HardRainNoon', 'SoftRainNoon', 'ClearSunset',
            'CloudySunset', 'WetSunset', 'WetCloudySunset', 'MidRainSunset',
            'HardRainSunset', 'SoftRainSunset'
        ]
        self._current_weather = 4
        self._current_command = 2.0
        # steering wheel
        self._steering_wheel_flag = True
        if self._steering_wheel_flag:
            self._is_on_reverse = False
            self._control = VehicleControl()
            # G29 wheel axis/button indices come from wheel_config.ini.
            self._parser = SafeConfigParser()
            self._parser.read('wheel_config.ini')
            self._steer_idx = int(
                self._parser.get('G29 Racing Wheel', 'steering_wheel'))
            self._throttle_idx = int(
                self._parser.get('G29 Racing Wheel', 'throttle'))
            self._brake_idx = int(self._parser.get('G29 Racing Wheel', 'brake'))
            self._reverse_idx = int(
                self._parser.get('G29 Racing Wheel', 'reverse'))
            self._handbrake_idx = int(
                self._parser.get('G29 Racing Wheel', 'handbrake'))
        # Fake timestamp carrier; elapsed_seconds is advanced by 0.2s per
        # tick in get_sensor_data.
        self.last_timestamp = lambda x: x
        self.last_timestamp.elapsed_seconds = 0.0
        self.last_timestamp.delta_seconds = 0.2
        self.initialize_map(driver_conf.city_name)

    def start(self):
        """Connect to the CARLA server, reset an episode, and (when driving
        manually) initialize the pygame joystick."""
        if __CARLA_VERSION__ == '0.8.X':
            self.carla = CarlaClient(self._host, int(self._port), timeout=120)
            self.carla.connect()
        else:
            self.carla = CarlaClient(self._host, int(self._port))
            self.carla.set_timeout(5000)
            wd = self.carla.get_world()
            self.wd = wd
            # Run the simulator in lock-step with this client.
            settings = wd.get_settings()
            settings.synchronous_mode = True
            wd.apply_settings(settings)
        self._reset()
        if not self._autopilot:
            pygame.joystick.init()
            joystick_count = pygame.joystick.get_count()
            if joystick_count > 1:
                print("Please Connect Just One Joystick")
                raise ValueError()
            self.joystick = pygame.joystick.Joystick(0)
            self.joystick.init()

    def test_alive(self):
        """Return True when the cached world handle still responds to a tick."""
        if not hasattr(self, "wd"):
            return False
        wd = self.wd
        wd.tick()
        try:
            wd.wait_for_tick(5.0)
        except:
            return False
        return True

    def __del__(self):
        """Best-effort teardown of the connection and all spawned actors."""
        if hasattr(self, 'carla'):
            print("destructing the connection")
            if __CARLA_VERSION__ == '0.8.X':
                self.carla.disconnect()
            else:
                # Only call destroy() on actors if the server is still alive;
                # otherwise just drop our references.
                alive = self.test_alive()
                # destroy old actors
                print('destroying actors')
                if alive:
                    if len(self._actor_list) > 0:
                        for actor in self._actor_list:
                            actor.destroy()
                self._actor_list = []
                print('done.')
                if self._vehicle is not None:
                    if alive:
                        self._vehicle.destroy()
                    self._vehicle = None
                if self._camera_center is not None:
                    if alive:
                        self._camera_center.destroy()
                    self._camera_center = None
                if self._camera_left is not None:
                    if alive:
                        self._camera_left.destroy()
                    self._camera_left = None
                if self._camera_right is not None:
                    if alive:
                        self._camera_right.destroy()
                    self._camera_right = None
                if self.collision_sensor is not None:
                    if alive:
                        self.collision_sensor.sensor.destroy()
                    self.collision_sensor = None
        # pygame.quit()
        # if self._camera is not None:
        #     self._camera.destroy()
        #     self._camera = None
        # if self._vehicle is not None:
        #     self._vehicle.destroy()
        #     self._vehicle = None

    def try_spawn_random_vehicle_at(self, blueprints, transform, auto_drive=True):
        """Try to spawn one random vehicle at `transform`.

        Returns True on success; the spawned actor is appended to
        self._actor_list so it gets destroyed on reset/teardown.
        """
        blueprint = random.choice(blueprints)
        if blueprint.has_attribute('color'):
            color = random.choice(
                blueprint.get_attribute('color').recommended_values)
            blueprint.set_attribute('color', color)
        vehicle = self._world.try_spawn_actor(blueprint, transform)
        if vehicle is not None:
            self._actor_list.append(vehicle)
            if auto_drive:
                # TODO: this won't work in 0.9.5 with Exp_Town
                # however, we don't have traffic in that town right now, so we just ignore this
                vehicle.set_autopilot()
            #print('spawned %r at %s' % (vehicle.type_id, transform.location))
            return True
        return False

    def get_parking_locations(self, filename, z_default=0.0, random_perturb=False):
        """Read "x, y, yaw" lines from `filename` and return carla.Transforms.

        When random_perturb is set, each location/yaw is jittered with
        Gaussian noise scaled by the driver-conf exploration std-devs.
        """
        with open(filename, "r") as f:
            lines = f.readlines()
        ans = []
        for line in lines:
            x, y, yaw = [float(v.strip()) for v in line.split(",")]
            if random_perturb:
                x += np.random.normal(
                    0, scale=self._driver_conf.extra_explore_location_std)
                y += np.random.normal(
                    0, scale=self._driver_conf.extra_explore_location_std)
                yaw += np.random.normal(
                    0, scale=self._driver_conf.extra_explore_yaw_std)
            ans.append(
                carla.Transform(location=carla.Location(x=x, y=y, z=z_default),
                                rotation=carla.Rotation(roll=0, pitch=0,
                                                        yaw=yaw)))
        return ans

    def print_transform(self, t):
        """Debug helper: print location and rotation of a carla.Transform."""
        print(t.location.x, t.location.y, t.location.z)
        print(t.rotation.roll, t.rotation.pitch, t.rotation.yaw)

    def _reset(self):
        """Start a fresh episode: reload settings (0.8.X) or respawn traffic,
        the ego vehicle, collision sensor, weather and cameras (newer API)."""
        self._episode_t0 = datetime.now()
        if __CARLA_VERSION__ == '0.8.X':
            # create the carla config based on template and the params passed in
            config = ConfigParser()
            config.optionxform = str
            config.read(self._config_path)
            config.set('CARLA/LevelSettings', 'NumberOfVehicles',
                       self._driver_conf.cars)
            config.set('CARLA/LevelSettings', 'NumberOfPedestrians',
                       self._driver_conf.pedestrians)
            config.set('CARLA/LevelSettings', 'WeatherId',
                       self._driver_conf.weather)
            output = io.StringIO()
            config.write(output)
            scene_descriptions = self.carla.load_settings(output.getvalue())
            # based on the scene descriptions, find the start and end positions
            self.positions = scene_descriptions.player_start_spots
            # the episode_config saves [start_index, end_index]
            self.episode_config = find_valid_episode_position(
                self.positions, self.planner)
            self.carla.start_episode(self.episode_config[0])
            print('RESET ON POSITION ', self.episode_config[0],
                  ", the target location is: ", self.episode_config[1])
        else:
            # destroy old actors
            print('destroying actors')
            for actor in self._actor_list:
                actor.destroy()
            self._actor_list = []
            print('done.')
            # TODO: spawn pedestrains
            # TODO: spawn more vehicles
            if self._autopilot:
                self._current_weather = self._weather_list[
                    int(self._driver_conf.weather) - 1]
            else:
                self._current_weather = random.choice(self._weather_list)
            if not self._autopilot:
                # select one of the random starting points previously selected
                start_positions = np.loadtxt(self._driver_conf.positions_file,
                                             delimiter=',')
                if len(start_positions.shape) == 1:
                    start_positions = start_positions.reshape(
                        1, len(start_positions))
            # TODO: Assign random position from file
            WINDOW_WIDTH = 768
            WINDOW_HEIGHT = 576
            CAMERA_FOV = 103.0
            # Camera rig: center plus two 45-degree side cameras on the hood.
            CAMERA_CENTER_T = carla.Location(x=0.7, y=-0.0, z=1.60)
            CAMERA_LEFT_T = carla.Location(x=0.7, y=-0.4, z=1.60)
            CAMERA_RIGHT_T = carla.Location(x=0.7, y=0.4, z=1.60)
            CAMERA_CENTER_ROTATION = carla.Rotation(roll=0.0, pitch=0.0, yaw=0.0)
            CAMERA_LEFT_ROTATION = carla.Rotation(roll=0.0, pitch=0.0, yaw=-45.0)
            CAMERA_RIGHT_ROTATION = carla.Rotation(roll=0.0, pitch=0.0, yaw=45.0)
            CAMERA_CENTER_TRANSFORM = carla.Transform(
                location=CAMERA_CENTER_T, rotation=CAMERA_CENTER_ROTATION)
            CAMERA_LEFT_TRANSFORM = carla.Transform(
                location=CAMERA_LEFT_T, rotation=CAMERA_LEFT_ROTATION)
            CAMERA_RIGHT_TRANSFORM = carla.Transform(
                location=CAMERA_RIGHT_T, rotation=CAMERA_RIGHT_ROTATION)
            self._world = self.carla.get_world()
            settings = self._world.get_settings()
            settings.synchronous_mode = True
            self._world.apply_settings(settings)
            # add traffic
            blueprints_vehi = self._world.get_blueprint_library().filter(
                'vehicle.*')
            blueprints_vehi = [
                x for x in blueprints_vehi
                if int(x.get_attribute('number_of_wheels')) == 4
            ]
            blueprints_vehi = [
                x for x in blueprints_vehi if not x.id.endswith('isetta')
            ]
            # @todo Needs to be converted to list to be shuffled.
            spawn_points = list(self._world.get_map().get_spawn_points())
            if len(spawn_points) == 0:
                # Exp_Town maps expose no spawn points, so use a hard-coded one.
                if self.city_name_demo == "Exp_Town":
                    spawn_points = [
                        carla.Transform(
                            location=carla.Location(x=-11.5, y=-8.0, z=2.0))
                    ]
            random.shuffle(spawn_points)
            print('found %d spawn points.' % len(spawn_points))
            # TODO: debug change 50 to 0
            count = 0
            if count > 0:
                for spawn_point in spawn_points:
                    if self.try_spawn_random_vehicle_at(
                            blueprints_vehi, spawn_point):
                        count -= 1
                    if count <= 0:
                        break
            while count > 0:
                time.sleep(0.5)
                if self.try_spawn_random_vehicle_at(
                        blueprints_vehi, random.choice(spawn_points)):
                    count -= 1
            # end traffic addition!
            # begin parking addition
            if hasattr(
                    self._driver_conf, "parking_position_file"
            ) and self._driver_conf.parking_position_file is not None:
                parking_points = self.get_parking_locations(
                    self._driver_conf.parking_position_file)
                random.shuffle(parking_points)
                print('found %d parking points.' % len(parking_points))
                count = 200
                for spawn_point in parking_points:
                    self.try_spawn_random_vehicle_at(blueprints_vehi,
                                                     spawn_point, False)
                    count -= 1
                    if count <= 0:
                        break
            # end of parking addition
            blueprints = self._world.get_blueprint_library().filter('vehicle')
            vechile_blueprint = [
                e for i, e in enumerate(blueprints)
                if e.id == 'vehicle.lincoln.mkz2017'
            ][0]
            # Spawn (or re-position) the ego vehicle.
            if self._vehicle == None or self._autopilot:
                if self._autopilot and self._vehicle is not None:
                    self._vehicle.destroy()
                    self._vehicle = None
                while self._vehicle == None:
                    if self._autopilot:
                        # from designated points
                        if hasattr(self._driver_conf,
                                   "extra_explore_prob") and random.random(
                                   ) < self._driver_conf.extra_explore_prob:
                            extra_positions = self.get_parking_locations(
                                self._driver_conf.extra_explore_position_file,
                                z_default=3.0,
                                random_perturb=True)
                            print(
                                "spawning hero vehicle from the extra exploration"
                            )
                            START_POSITION = random.choice(extra_positions)
                        else:
                            START_POSITION = random.choice(spawn_points)
                    else:
                        random_position = start_positions[
                            np.random.randint(start_positions.shape[0]), :]
                        START_POSITION = carla.Transform(
                            carla.Location(x=random_position[0],
                                           y=random_position[1],
                                           z=random_position[2] + 1.0),
                            carla.Rotation(pitch=random_position[3],
                                           roll=random_position[4],
                                           yaw=random_position[5]))
                    self._vehicle = self._world.try_spawn_actor(
                        vechile_blueprint, START_POSITION)
            else:
                if self._autopilot:
                    # from designated points
                    START_POSITION = random.choice(spawn_points)
                else:
                    random_position = start_positions[
                        np.random.randint(start_positions.shape[0]), :]
                    START_POSITION = carla.Transform(
                        carla.Location(x=random_position[0],
                                       y=random_position[1],
                                       z=random_position[2] + 1.0),
                        carla.Rotation(pitch=random_position[3],
                                       roll=random_position[4],
                                       yaw=random_position[5]))
                self._vehicle.set_transform(START_POSITION)
            print("after spawning the ego vehicle")
            print("warm up process to make the vehicle ego location correct")
            wd = self._world
            for i in range(25):
                wd.tick()
                if not wd.wait_for_tick(10.0):
                    continue
            print("warmup finished")
            if self._autopilot:
                # Nope: self._vehicle.set_autopilot()
                print("before roaming agent")
                self._agent_autopilot = RoamingAgent(self._vehicle)
                print("after roaming agent")
            # Re-create the collision sensor for the new ego vehicle.
            if self.collision_sensor is not None:
                print("before destroying the sensor")
                self.collision_sensor.sensor.destroy()
                print("after destroying the sensor")
            else:
                print("collision sensor is None")
            self.collision_sensor = CollisionSensor(self._vehicle, self)
            print("after spawning the collision sensor")
            # set weather
            weather = getattr(carla.WeatherParameters, self._current_weather)
            self._vehicle.get_world().set_weather(weather)
            self._spectator = self._world.get_spectator()
            cam_blueprint = self._world.get_blueprint_library().find(
                'sensor.camera.rgb')
            cam_blueprint.set_attribute('image_size_x', str(WINDOW_WIDTH))
            cam_blueprint.set_attribute('image_size_y', str(WINDOW_HEIGHT))
            cam_blueprint.set_attribute('fov', str(CAMERA_FOV))
            # Tear down old cameras, then attach a fresh rig to the vehicle.
            if self._camera_center is not None:
                self._camera_center.destroy()
                self._camera_left.destroy()
                self._camera_right.destroy()
                self._camera_center = None
            if self._camera_center == None:
                self._camera_center = self._world.spawn_actor(
                    cam_blueprint,
                    CAMERA_CENTER_TRANSFORM,
                    attach_to=self._vehicle)
                self._camera_left = self._world.spawn_actor(
                    cam_blueprint,
                    CAMERA_LEFT_TRANSFORM,
                    attach_to=self._vehicle)
                self._camera_right = self._world.spawn_actor(
                    cam_blueprint,
                    CAMERA_RIGHT_TRANSFORM,
                    attach_to=self._vehicle)
                self._camera_center.listen(CallBack('CameraMiddle', self))
                self._camera_left.listen(CallBack('CameraLeft', self))
                self._camera_right.listen(CallBack('CameraRight', self))
            # spectator server camera
            self._spectator = self._world.get_spectator()
        self._skiped_frames = 0
        self._stucked_counter = 0
        self._start_time = time.time()

    def get_recording(self):
        """Return whether the current frame should be recorded.

        Autopilot: record after skipping the first 20 warm-up frames.
        Manual: toggle recording with joystick buttons 6 (start) / 7 (stop).
        """
        if self._autopilot:
            # debug: 0 for debugging
            if self._skiped_frames >= 20:
                return True
            else:
                self._skiped_frames += 1
                return False
        else:
            '''
            if (self.joystick.get_button(8)):
                self._recording = True
            if (self.joystick.get_button(9)):
                self._recording = False
            '''
            if (self.joystick.get_button(6)):
                self._recording = True
                print("start recording!!!!!!!!!!!!!!!!!!!!!!!!1")
            if (self.joystick.get_button(7)):
                self._recording = False
                print("end recording!!!!!!!!!!!!!!!!!!!!!!!!1")
            return self._recording

    def initialize_map(self, city_name):
        """Load the demo-area mask image for RFS_MAP; otherwise do nothing."""
        self.city_name_demo = city_name
        if city_name == "RFS_MAP":
            path = get_current_folder() + "/maps_demo_area/rfs_demo_area.png"
            im = cv2.imread(path)
            im = im[:, :, :3]
            # BGR (OpenCV) -> RGB channel order.
            im = im[:, :, ::-1]
            self.demo_area_map = im
        else:
            print("do nothing since not a city with demo area")
            #raise ValueError("wrong city name: " + city_name)

    def loc_to_pix_rfs_sim(self, loc):
        """Map a world (x, y) location to [row, col] pixel indices in the
        RFS demo-area image (affine fit with hard-coded coefficients)."""
        u = 3.6090651558073654 * loc[1] + 2500.541076487252
        v = -3.6103367739019054 * loc[0] + 2501.862578166202
        return [int(v), int(u)]

    def in_valid_area(self, x, y):
        """Return True when (x, y) lies within the demonstrated RFS area
        (any nonzero mask pixel in a 100x100-pixel neighborhood); always
        True for other maps."""
        if self.city_name_demo == "RFS_MAP":
            pos = self.loc_to_pix_rfs_sim([x, y])
            locality = 50  # 100 pixels
            local_area = self.demo_area_map[pos[0] - locality:pos[0] + locality,
                                            pos[1] - locality:pos[1] + locality,
                                            0] > 0
            valid = np.sum(local_area) > 0
            if not valid:
                print(
                    "detect the vehicle is not in the valid demonstrated area")
            return valid
        else:
            return True

    def get_reset(self):
        """Decide whether the episode should be reset (autopilot only).

        Resets on timeout, collision, being stuck (>250 low-speed ticks),
        or (new API) leaving the valid demo area. Returns True when a
        reset was performed.
        """
        if self._autopilot:
            if __CARLA_VERSION__ == '0.8.X':
                # increase the stuck detector if conditions satisfy
                if self._latest_measurements.player_measurements.forward_speed < 0.1:
                    self._stucked_counter += 1
                else:
                    self._stucked_counter = 0
                # if within auto pilot, reset if long enough or has collisions
                if time.time() - self._start_time > self._reset_period \
                        or self._latest_measurements.player_measurements.collision_vehicles > 0.0 \
                        or self._latest_measurements.player_measurements.collision_pedestrians > 0.0 \
                        or self._latest_measurements.player_measurements.collision_other > 0.0 \
                        or (self._latest_measurements.player_measurements.intersection_otherlane > 0.0
                            and self._latest_measurements.player_measurements.autopilot_control.steer < -0.99) \
                        or self._stucked_counter > 250:
                    if self._stucked_counter > 250:
                        reset_because_stuck = True
                    else:
                        reset_because_stuck = False
                    # TODO: commenting out this for debugging issue
                    self._reset()
                    if reset_because_stuck:
                        print("resetting because getting stucked.....")
                    return True
            else:
                # TODO: implement the collision detection algorithm, based on the new API
                if self.last_estimated_speed < 0.1:
                    self._stucked_counter += 1
                else:
                    self._stucked_counter = 0
                if time.time() - self._start_time > self._reset_period \
                        or self._last_collided \
                        or self._stucked_counter > 250 \
                        or not self.in_valid_area(self._latest_measurements.player_measurements.transform.location.x,
                                                  self._latest_measurements.player_measurements.transform.location.y):
                    #or np.abs(self._vehicle.get_vehicle_control().steer) > 0.95:
                    #or np.abs(self._vehicle.get_vehicle_control().brake) > 1:
                    # TODO intersection other lane is not available, so omit from the condition right now
                    if self._stucked_counter > 250:
                        reset_because_stuck = True
                    else:
                        reset_because_stuck = False
                    if self._last_collided:
                        print("reset becuase collision")
                    self._reset()
                    if reset_because_stuck:
                        print("resetting because getting stucked.....")
                    return True
        else:
            pass
        return False

    def get_waypoints(self):
        """Placeholder: return dummy waypoints until the API exposes them."""
        # TODO: waiting for German Ros to expose the waypoints
        wp1 = [1.0, 1.0]
        wp2 = [2.0, 2.0]
        return [wp1, wp2]

    def action_joystick(self):
        """Build a VehicleControl from plain joystick axes/buttons."""
        # joystick
        steering_axis = self.joystick.get_axis(0)
        acc_axis = self.joystick.get_axis(2)
        brake_axis = self.joystick.get_axis(5)
        # print("axis 0 %f, axis 2 %f, axis 3 %f" % (steering_axis, acc_axis, brake_axis))
        if (self.joystick.get_button(3)):
            self._rear = True
        if (self.joystick.get_button(2)):
            self._rear = False
        control = VehicleControl()
        control.steer = steering_axis
        # Axes report [-1, 1]; remap throttle/brake to [0, 1].
        control.throttle = (acc_axis + 1) / 2.0
        control.brake = (brake_axis + 1) / 2.0
        if control.brake < 0.001:
            control.brake = 0.0
        control.hand_brake = 0
        control.reverse = self._rear
        # Fixed steering offset calibration for this device.
        control.steer -= 0.0822
        #print("steer %f, throttle %f, brake %f" % (control.steer, control.throttle, control.brake))
        pygame.event.pump()
        return control

    def action_steering_wheel(self, jsInputs, jsButtons):
        """Build a VehicleControl from G29 steering-wheel inputs."""
        control = VehicleControl()
        # Custom function to map range of inputs [1, -1] to outputs [0, 1] i.e 1 from inputs means nothing is pressed
        # For the steering, it seems fine as it is
        K1 = 1.0  # 0.55
        steerCmd = K1 * math.tan(1.1 * jsInputs[self._steer_idx])
        K2 = 1.6  # 1.6
        throttleCmd = K2 + (
            2.05 * math.log10(-0.7 * jsInputs[self._throttle_idx] + 1.4) -
            1.2) / 0.92
        if throttleCmd <= 0:
            throttleCmd = 0
        elif throttleCmd > 1:
            throttleCmd = 1
        brakeCmd = 1.6 + (2.05 * math.log10(-0.7 * jsInputs[self._brake_idx] +
                                            1.4) - 1.2) / 0.92
        if brakeCmd <= 0:
            brakeCmd = 0
        elif brakeCmd > 1:
            brakeCmd = 1
        #print("Steer Cmd, ", steerCmd, "Brake Cmd", brakeCmd, "ThrottleCmd", throttleCmd)
        control.steer = steerCmd
        control.brake = brakeCmd
        control.throttle = throttleCmd
        # Reverse button toggles; counter parity tracks on/off state.
        toggle = jsButtons[self._reverse_idx]
        if toggle == 1:
            self._is_on_reverse += 1
        if self._is_on_reverse % 2 == 0:
            control.reverse = False
        if self._is_on_reverse > 1:
            self._is_on_reverse = True
        if self._is_on_reverse:
            control.reverse = True
        control.hand_brake = False  # jsButtons[self.handbrake_idx]
        return control

    def compute_action(self, sensor, speed):
        """Return the next VehicleControl, from joystick/wheel input in
        manual mode or from the autopilot otherwise."""
        if not self._autopilot:
            # get pygame input
            for event in pygame.event.get():
                # Possible joystick actions: JOYAXISMOTION JOYBALLMOTION JOYBUTTONDOWN
                # JOYBUTTONUP JOYHATMOTION
                if event.type == pygame.JOYBUTTONDOWN:
                    # Buttons 0-3 select the high-level command; 23 clears it;
                    # 4 forces an episode reset.
                    if event.__dict__['button'] == 0:
                        self._current_command = 2.0
                    if event.__dict__['button'] == 1:
                        self._current_command = 3.0
                    if event.__dict__['button'] == 2:
                        self._current_command = 4.0
                    if event.__dict__['button'] == 3:
                        self._current_command = 5.0
                    if event.__dict__['button'] == 23:
                        self._current_command = 0.0
                    if event.__dict__['button'] == 4:
                        self._reset()
                        return VehicleControl()
                if event.type == pygame.JOYBUTTONUP:
                    self._current_command = 2.0
            #pygame.event.pump()
            numAxes = self.joystick.get_numaxes()
            jsInputs = [
                float(self.joystick.get_axis(i)) for i in range(numAxes)
            ]
            # print (jsInputs)
            jsButtons = [
                float(self.joystick.get_button(i))
                for i in range(self.joystick.get_numbuttons())
            ]
            if self._steering_wheel_flag:
                control = self.action_steering_wheel(jsInputs, jsButtons)
            else:
                control = self.action_joystick()
        else:
            if __CARLA_VERSION__ == '0.8.X':
                # This relies on the calling of get_sensor_data, otherwise self._latest_measurements are not filled
                control = self._latest_measurements.player_measurements.autopilot_control
                print('[Throttle = {}] [Steering = {}] [Brake = {}]'.format(
                    control.throttle, control.steer, control.brake))
            else:
                self._world.tick()
                if self._world.wait_for_tick(10.0):
                    control, self._current_command = self._agent_autopilot.run_step(
                    )
                    print('[Throttle = {}] [Steering = {}] [Brake = {}]'.format(
                        control.throttle, control.steer, control.brake))
        return control

    def estimate_speed(self):
        """Return the ego vehicle's speed in m/s from its velocity vector."""
        vel = self._vehicle.get_velocity()
        speed = m.sqrt(vel.x**2 + vel.y**2 + vel.z**2)  # speed in m/s
        return speed

    def get_sensor_data(self, goal_pos=None, goal_ori=None):
        """Collect the latest measurements, sensor images, and the current
        high-level direction command.

        Returns (measurements, sensor_data, direction). For the newer API
        the measurements are synthesized into namedtuples shaped like the
        0.8.X protobuf structures so downstream code is unchanged.
        """
        if __CARLA_VERSION__ == '0.8.X':
            # return the latest measurement and the next direction
            measurements, sensor_data = self.carla.read_data()
            self._latest_measurements = measurements
            if self.use_planner:
                player_data = measurements.player_measurements
                pos = [
                    player_data.transform.location.x,
                    player_data.transform.location.y, 0.22
                ]
                ori = [
                    player_data.transform.orientation.x,
                    player_data.transform.orientation.y,
                    player_data.transform.orientation.z
                ]
                # Reset once we are close enough to the episode goal.
                if sldist([
                        player_data.transform.location.x,
                        player_data.transform.location.y
                ], [
                        self.positions[self.episode_config[1]].location.x,
                        self.positions[self.episode_config[1]].location.y
                ]) < self._goal_reaching_threshold:
                    self._reset()
                direction = self.planner.get_next_command(
                    pos, ori, [
                        self.positions[self.episode_config[1]].location.x,
                        self.positions[self.episode_config[1]].location.y, 0.22
                    ], (1, 0, 0))
            else:
                direction = 2.0
        else:
            # Manual clock: advance the fake timestamp by one 0.2s tick.
            self.last_timestamp.elapsed_seconds += 0.2
            #self.last_timestamp = self.carla.get_world().wait_for_tick(30.0)
            #print(timestamp.delta_seconds, "delta seconds")
            #while self.update_once == False:
            #    time.sleep(0.01)
            self.last_estimated_speed = self.estimate_speed()
            # Snapshot camera buffers under the lock (filled by callbacks).
            data_buffer_lock.acquire()
            sensor_data = copy.deepcopy(self._data_buffers)
            data_buffer_lock.release()
            #self.update_once = False
            # Drain collision events accumulated since the last call.
            collision_lock.acquire()
            colllision_event = self._collision_events
            self._last_collided = len(colllision_event) > 0
            self._collision_events = []
            collision_lock.release()
            if len(colllision_event) > 0:
                print(colllision_event)
            # TODO: make sure those events are actually valid
            if 'Static' in colllision_event:
                collision_other = 1.0
            else:
                collision_other = 0.0
            if "Vehicles" in colllision_event:
                collision_vehicles = 1.0
            else:
                collision_vehicles = 0.0
            if "Pedestrians" in colllision_event:
                collision_pedestrians = 1.0
            else:
                collision_pedestrians = 0.0
            #current_ms_offset = int(math.ceil((datetime.now() - self._episode_t0).total_seconds() * 1000))
            # TODO: get a gametime stamp, instead of os timestamp
            #current_ms_offset = int(self.carla.get_timestamp().elapsed_seconds * 1000)
            #print(current_ms_offset, "ms offset")
            current_ms_offset = self.last_timestamp.elapsed_seconds * 1000
            # Mimic the 0.8.X measurement structure with namedtuples.
            second_level = namedtuple('second_level', [
                'forward_speed', 'transform', 'collision_other',
                'collision_pedestrians', 'collision_vehicles'
            ])
            transform = namedtuple('transform', ['location', 'orientation'])
            loc = namedtuple('loc', ['x', 'y'])
            ori = namedtuple('ori', ['x', 'y', 'z'])
            Meas = namedtuple('Meas',
                              ['player_measurements', 'game_timestamp'])
            v_transform = self._vehicle.get_transform()
            measurements = Meas(
                second_level(
                    self.last_estimated_speed,
                    transform(
                        loc(v_transform.location.x, v_transform.location.y),
                        ori(v_transform.rotation.pitch,
                            v_transform.rotation.roll,
                            v_transform.rotation.yaw)), collision_other,
                    collision_pedestrians, collision_vehicles),
                current_ms_offset)
            direction = self._current_command
            self._latest_measurements = measurements
        #print('[Speed = {} Km/h] [Direction = {}]'.format(measurements.player_measurements.forward_speed, direction))
        #print(">>>>> planner output direction: ", direction)
        return measurements, sensor_data, direction

    def act(self, control):
        """Send the given control to the simulator (API-version aware)."""
        if __CARLA_VERSION__ == '0.8.X':
            self.carla.send_control(control)
        else:
            self._vehicle.apply_control(control)
class CrawlerConfig():
    """Crawler configuration loaded from an INI-style config file.

    Reads the ``[default]``, ``[test]`` and ``[emulator]`` sections and
    exposes their values as attributes, resolves the host's outbound IP
    address, and loads (optionally shuffling) the list of malware sites.

    Fixes over the previous version: the duplicate ``redirection_timeout``
    assignment is removed, the ``True if ... else False`` boolean
    round-trips are simplified to direct comparisons, and the probe
    socket in :meth:`get_ip_address` is now closed.
    """

    def __init__(self, cfg_file='../config/crawler.config'):
        self.config = SafeConfigParser()
        self.config.read(cfg_file)

        # --- [default] section ---
        self.app_log = self.config.get('default', 'app_log').strip()
        self.adb_log = self.config.get('default', 'adb_log').strip()
        self.net_ip = self.get_ip_address()
        self.proxy = self.config.get('default', 'proxy').strip()
        # retries is intentionally kept as a string, matching prior behavior.
        self.retries = self.config.get('default', 'retries').strip()
        self.redirection_timeout = int(
            self.config.get('default', 'redirection_timeout').strip())

        # --- [test] section ---
        # Flags are stored as the literal string "True" in the config file.
        self.test = self.config.get('test', 'test_run').strip() == "True"
        self.shuffle = self.config.get('test', 'shuffle').strip() == "True"
        # getMalwareSites() consults self.shuffle, so it must be set first.
        self.mal_sites = self.getMalwareSites()
        self.num_sites = int(self.config.get('test', 'sample_size').strip())
        self.start_from = int(self.config.get('test', 'start_from').strip())

        # --- [emulator] section ---
        self.emulated = self.config.get('emulator',
                                        'emulated').strip() == "True"
        self.studio_path = self.config.get('emulator', 'studio_path').strip()
        self.emulator = self.config.get('emulator', 'emulator').strip()
        self.pin = self.config.get('emulator', 'pin').strip()
        # AVD names are encoded to ASCII bytes, matching prior behavior.
        self.avds = [(x.strip()).encode('ascii', 'ignore') for x in
                     (self.config.get('emulator', 'avds').split(','))]
        self.emu_start_delay = int(
            self.config.get('emulator', 'start_delay').strip())

    def getMalwareSites(self):
        """Return the malware site URLs (first CSV column), shuffled when
        ``self.shuffle`` is set."""
        sites = []
        malware_sites = self.config.get('default', 'mal_sites_file')
        with open(malware_sites) as sfp:
            creader = csv.reader(sfp)
            for row in creader:
                sites.append(row[0])
        if self.shuffle:
            random.shuffle(sites)
        return sites

    @staticmethod
    def get_ip_address():
        """Get the IP of the interface connected to the net.

        Returns the local address used to reach 8.8.8.8, or None when no
        route is available. The probe socket is always closed (the old
        implementation leaked it).
        """
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
                s.connect(("8.8.8.8", 80))
                return s.getsockname()[0]
        except Exception:
            print("No internet connection!")
            return None
def _config_to_configparser(self, config):
    """
    Return a ConfigParser instance representing a given config dictionary.

    Args:
        config (dict): Dictionary of config key/value pairs.

    Returns:
        SafeConfigParser: SafeConfigParser instance representing config.
    """
    # All values must be strings for ConfigParser; non-string settings are
    # converted with text_type, and day_start is formatted as HH:MM:SS.
    result = SafeConfigParser()

    result.add_section('Backend')
    result.set('Backend', 'store', config['store'])
    result.set('Backend', 'day_start',
               config['day_start'].strftime('%H:%M:%S'))
    result.set('Backend', 'fact_min_delta', text_type(config['fact_min_delta']))
    result.set('Backend', 'tmpfile_path', text_type(config['tmpfile_path']))
    result.set('Backend', 'db_engine', config['db_engine'])
    result.set('Backend', 'db_path', text_type(config['db_path']))

    result.add_section('Frontend')
    result.set('Frontend', 'autocomplete_activities_range',
               text_type(config['autocomplete_activities_range']))
    result.set('Frontend', 'autocomplete_split_activity',
               text_type(config['autocomplete_split_activity']))

    return result
class GPM(SearchGranules):
    """Accessor for GPM/TRMM satellite granule archives.

    Locates granule files under a configured data root, reads the
    requested variable from each granule via a configurable HDF reader,
    and optionally grids and/or time-bins the concatenated swath data.
    """

    def __init__(self, prjName, prdLv, prdVer, **kwargs):
        '''
        Initialize the product accessor.

        prjName : project/product name, e.g.) 'GPM.KuPR'
        prdLv   : product level, e.g.) 'L2'
        prdVer  : product version, e.g.) '02'
        kwargs  : overrides merged into the 'Defaults' config section
        '''
        modroot = os.path.dirname(__file__)
        # Environment variables are made available for interpolation in
        # the 'config' file shipped next to this module.
        self.cfg = SafeConfigParser(os.environ)
        self.cfg.read(os.path.join(modroot, 'config'))
        # NOTE(review): pokes ConfigParser internals to merge overrides;
        # bypasses interpolation/validation — confirm intended.
        self.cfg._sections['Defaults'].update(kwargs)

        # Fall back to the current working directory when no data root set.
        if self.cfg.get('Defaults', 'dataroot') == '':
            self.cfg.set('Defaults', 'dataroot', os.environ['PWD'])

        self.dataDir = self.cfg.get('Defaults', 'dataroot')
        self.prjName = prjName
        self.prdLv = prdLv
        self.prdVer = prdVer
        self.prdDir = os.path.join(self.dataDir, self.prjName, self.prdLv,
                                   self.prdVer)
        self.cached = self.cfg.get('Defaults', 'cached')
        self.cacheDir = self.cfg.get('Defaults', 'cache_dir')

        # Pick the reader module path by platform (TRMM -> HDF4, GPM -> HDF5).
        fnPath = {
            'TRMM': self.cfg.get('Defaults', 'hdf4_module'),
            'GPM': self.cfg.get('Defaults', 'hdf5_module')
        }[prjName.split('.')[0]]
        fnName = fnPath.split('.')[-1]  # NOTE(review): unused — confirm
        modPath = '.'.join(fnPath.split('.')[:-1])
        # NOTE(review): always loads the attribute named 'read_hdf5'
        # regardless of fnName — confirm this is intended for TRMM/HDF4.
        self.func_read = getattr(importlib.import_module(modPath), 'read_hdf5')
        '''
        self.cacheDir = os.path.join( self.dataDir, 'cache.dim', self.prjName, self.prdLv, self.prdVer)

        self.prdDir = '%s/%s/%s/%s'%(self.dataDir, self.prjName, self.prdLv, self.prdVer)
        self.cacheDir = '%s/cache.dim/%s/%s/%s'%(self.dataDir, self.prjName, self.prdLv, self.prdVer)

        self.func_read = {'TRMM': read_hdf4, 'GPM' : read_hdf5}[ prjName.split('.')[0] ]
        '''
        '''
        dictGrp = {'GPM.GMI':'S1',
                   'GPM.DPR':'NS',      # HS, MS, NS
                   'GPM.KaPR':'MS',     # HS, MS
                   'GPM.KuPR':'NS',}
        grpCode = dictGrp[ self.prjName ]
        '''

    def __call__(self, varName, sDTime, eDTime, BBox=None, res=None,
                 delT=None):
        '''
        Extract `varName` for the time window [sDTime, eDTime].

        res     : spa. res. of 2d-array (enables gridding when given)
        sDTime  : DTime bound left
        eDTime  : DTime bound right
        BBox    : optional bounding box filter for granule search
        delT    : optional time-bin width; results are binned when given

        Returns a populated GPM_data object, or None when no granules match.
        '''
        # mapCode encodes the grid resolution, e.g. res=0.25 -> '^025'.
        mapCode = '^' + ''.join(str(res).split('.'))
        gpmData = GPM_data()

        srcDir = os.path.join(self.dataDir, self.prdDir)
        assert os.path.exists(srcDir), '{} is not exists.'.format(srcDir)

        # Each granule: (srcPath, dtime, lat, lon, idx) — from SearchGranules.
        Granule = self.search_granules(srcDir, sDTime, eDTime, BBox)
        if len(Granule) == 0:
            print('! Warning ! no data extracted')
            return None

        # Pre-allocate flat output arrays sized to the total record count.
        outSize = sum([len(gra[2]) for gra in Granule]), Granule[0][2].shape[1]
        Lat = empty(outSize, 'float32')
        Lon = empty(outSize, 'float32')
        aOut = empty(outSize, 'float32')
        DTime = []

        prvI = 0
        for granule in Granule:
            srcPath, dtime, lat, lon, idx = granule

            gpmData.srcPath.append(srcPath)
            gpmData.recLen.append(
                len(dtime))  # number of data record for each file

            nxtI = prvI + len(dtime)
            aOut[prvI:nxtI] = self.func_read(srcPath, varName, idx.tolist())
            Lat[prvI:nxtI] = lat
            Lon[prvI:nxtI] = lon
            DTime.extend(dtime)

            # Per-granule gridding (only when not time-binning).
            if res != None and delT == None:
                gpmData.griddata.append(
                    granule2map(lat, lon, aOut[prvI:nxtI], BBox, res))
                gpmData.grid = GridCoordinates(mapCode, BBox=BBox)

            prvI = nxtI

        if delT != None:
            # Bin records into [sDTime, eDTime) windows of width delT.
            dtBnd = dtrange(sDTime, eDTime, delT)
            # NOTE(review): map(None, ...) is Python-2-only (zip-like with
            # padding); this raises TypeError on Python 3 — confirm runtime.
            gpmData.tbound = map(None, dtBnd[:-1], dtBnd[1:])
            gpmData.dtime = bin_bytbound(DTime, dtBnd, DTime)
            gpmData.lat = bin_bytbound(DTime, dtBnd, Lat)
            gpmData.lon = bin_bytbound(DTime, dtBnd, Lon)
            gpmData.data = bin_bytbound(DTime, dtBnd, aOut)

            if res != None:
                # Grid each time bin separately.
                gpmData.griddata = [
                    granule2map(lat, lon, a, BBox, res)
                    for lat, lon, a in map(None, gpmData.lat, gpmData.lon,
                                           gpmData.data)
                ]
                gpmData.grid = GridCoordinates(mapCode, BBox=BBox)
        else:
            # No binning: return the flat concatenated swath arrays.
            gpmData.dtime = DTime
            gpmData.lat = Lat
            gpmData.lon = Lon
            gpmData.data = aOut

        return gpmData
class ModelGeneration:
    """Generate an ngspice XSPICE code-model from a VHDL entity.

    Parses the port clause of the given VHDL file into connection_info.txt,
    then emits the files ngspice/GHDL co-simulation needs: cfunc.mod (the
    C code model that talks to the GHDL server over a socket), ifspec.ifs
    (the XSPICE interface spec), a VHDL testbench, and the start_server /
    sock_pkg_create shell scripts.  Paths come from ~/.nghdl/config.ini.
    """

    def __init__(self, file):
        # Parse the VHDL port clause and write connection_info.txt.
        # NOTE: the parameter name shadows the (Py2) builtin ``file``.
        # Script starts from here
        print("Arguement is : ", file)
        self.fname = os.path.basename(file)
        print("VHDL filename is : ", self.fname)
        self.home = os.path.expanduser("~")
        self.parser = SafeConfigParser()
        self.parser.read(
            os.path.join(self.home, os.path.join('.nghdl', 'config.ini')))
        self.ngspice_home = self.parser.get('NGSPICE', 'NGSPICE_HOME')
        self.release_dir = self.parser.get('NGSPICE', 'RELEASE')
        self.src_home = self.parser.get('SRC', 'SRC_HOME')
        self.licensefile = self.parser.get('SRC', 'LICENSE')

        # #### Creating connection_info.txt file from vhdl file #### #
        read_vhdl = open(file, 'r')
        vhdl_data = read_vhdl.readlines()
        read_vhdl.close()

        start_flag = -1  # Used for scaning part of data
        scan_data = []
        # p=re.search('port(.*?)end',read_vhdl,re.M|re.I|re.DOTALL).group()
        # Collect the lines between "port" and "end", stripped of the
        # port keyword, parentheses and semicolons.
        for item in vhdl_data:
            if re.search('port', item, re.I):
                start_flag = 1
            elif re.search("end", item, re.I):
                start_flag = 0

            if start_flag == 1:
                item = re.sub("port", " ", item, flags=re.I)
                item = re.sub("\(", " ", item, flags=re.I)  # noqa
                item = re.sub("\)", " ", item, flags=re.I)  # noqa
                item = re.sub(";", " ", item, flags=re.I)
                scan_data.append(item.rstrip())
                scan_data = [_f for _f in scan_data if _f]
            elif start_flag == 0:
                break

        port_info = []
        self.port_vector_info = []

        # Each entry becomes "name : direction :width"; vector flag records
        # whether the port was declared std_logic_vector.
        for item in scan_data:
            print("Scan Data :", item)
            if re.search("in", item, flags=re.I):
                if re.search("std_logic_vector", item, flags=re.I):
                    temp = re.compile(r"\s*std_logic_vector\s*", flags=re.I)
                elif re.search("std_logic", item, flags=re.I):
                    temp = re.compile(r"\s*std_logic\s*", flags=re.I)
                else:
                    raise ValueError("Please check your vhdl " +
                                     "code for datatype of input port")
            elif re.search("out", item, flags=re.I):
                if re.search("std_logic_vector", item, flags=re.I):
                    temp = re.compile(r"\s*std_logic_vector\s*", flags=re.I)
                elif re.search("std_logic", item, flags=re.I):
                    temp = re.compile(r"\s*std_logic\s*", flags=re.I)
                else:
                    raise ValueError("Please check your vhdl " +
                                     "code for datatype of output port")
            else:
                raise ValueError(
                    "Please check the in/out direction of your port")

            lhs = temp.split(item)[0]
            rhs = temp.split(item)[1]
            # "N downto 0" declares N+1 bits; a scalar port has width 1.
            bit_info = re.compile(r"\s*downto\s*", flags=re.I).split(rhs)[0]
            if bit_info:
                port_info.append(lhs + ":" + str(int(bit_info) + int(1)))
                self.port_vector_info.append(1)
            else:
                port_info.append(lhs + ":" + str(int(1)))
                self.port_vector_info.append(0)

        print("Port Info :", port_info)

        # Open connection_info.txt file; each line: "name direction width".
        con_ifo = open('connection_info.txt', 'w')
        for item in port_info:
            word = item.split(':')
            con_ifo.write(word[0].strip() + ' ' + word[1].strip() + ' ' +
                          word[2].strip())
            con_ifo.write("\n")
        con_ifo.close()

    def readPortInfo(self):
        """Load connection_info.txt into self.input_port / self.output_port
        as "name:width" strings, and set self.port_vector_info order."""
        # ############## Reading connection/port information ############## #

        # Declaring input and output list
        input_list = []
        output_list = []

        # Reading connection_info.txt file for port infomation
        read_file = open('connection_info.txt', 'r')
        data = read_file.readlines()
        read_file.close()

        # Extracting input and output port list from data
        print("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")
        for line in data:
            print(line)
            if re.match(r'^\s*$', line):
                pass
            else:
                # NOTE(review): substring match — a direction field "out"
                # also contains no "in", but a port *name* containing
                # "in"/"out" would be misclassified; confirm naming rules.
                in_items = re.findall(
                    "IN", line, re.MULTILINE | re.IGNORECASE)
                out_items = re.findall(
                    "OUT", line, re.MULTILINE | re.IGNORECASE)
                if in_items:
                    input_list.append(line.split())
                if out_items:
                    output_list.append(line.split())

        print("Inout List :", input_list)
        print("Output list", output_list)

        self.input_port = []
        self.output_port = []

        # creating list of input and output port with its weight
        for input in input_list:
            self.input_port.append(input[0] + ":" + input[2])

        for output in output_list:
            self.output_port.append(output[0] + ":" + output[2])

        print("Output Port List : ", self.output_port)
        print("Input Port List : ", self.input_port)

    def createCfuncModFile(self):
        """Emit cfunc.mod: the XSPICE code-model C source that exchanges
        port values with the GHDL testbench over a TCP socket."""
        # ############## Creating content for cfunc.mod file ############## #
        print("Starting With cfunc.mod file")
        cfunc = open('cfunc.mod', 'w')

        print("Building content for cfunc.mod file")

        comment = '''/* This is cfunc.mod file auto generated by gen_con_info.py
                Developed by Fahim, Rahul at IIT Bombay */\n
            '''

        header = '''
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <time.h>
#include <sys/types.h>
#include <stdlib.h>
#include <unistd.h>
#include <errno.h>
        '''

        # Windows uses winsock2; elsewhere the BSD socket headers.
        if os.name == 'nt':
            header += '''
            #undef BOOLEAN
            #include<winsock2.h>
            '''
        else:
            header += '''
            #include <sys/socket.h>
            #include <netinet/in.h>
            #include <netdb.h>
            '''

        function_open = (
            '''void cm_''' + self.fname.split('.')[0] + '''(ARGS) \n{''')

        # One pair of Digital_State_t pointers per output port.
        digital_state_output = []
        for item in self.output_port:
            digital_state_output.append("Digital_State_t *_op_" +
                                        item.split(':')[0] + ", *_op_" +
                                        item.split(':')[0] + "_old;")

        var_section = '''
    // Declaring components of Client
    FILE *log_client = NULL;
    log_client=fopen("client.log","a");
    int bytes_recieved;
    char send_data[1024];
    char recv_data[1024];
    char *key_iter;
    struct hostent *host;
    struct sockaddr_in server_addr;
    int sock_port = 5000+PARAM(instance_id);
        '''

        # On Windows the SOCKET handle is declared in the fetch-IP section.
        if os.name != 'nt':
            var_section += '''
    int socket_fd;
            '''

        temp_input_var = []
        for item in self.input_port:
            temp_input_var.append("char temp_" + item.split(':')[0] +
                                  "[1024];")

        # Start of INIT function
        init_start_function = '''
    if(INIT)
    {
        /* Allocate storage for output ports ''' \
            '''and set the load for input ports */
        '''

        cm_event_alloc = []
        cm_count_output = 0
        for item in self.output_port:
            cm_event_alloc.append("cm_event_alloc(" + str(cm_count_output) +
                                  "," + item.split(':')[1] +
                                  "*sizeof(Digital_State_t));")
            cm_count_output = cm_count_output + 1

        load_in_port = []
        for item in self.input_port:
            load_in_port.append("for(Ii=0;Ii<PORT_SIZE(" +
                                item.split(':')[0] +
                                ");Ii++)\n\t\t{\n\t\t\tLOAD(" +
                                item.split(':')[0] +
                                "[Ii])=PARAM(input_load); \n\t\t}")

        cm_count_ptr = 0
        cm_event_get_ptr = []
        for item in self.output_port:
            cm_event_get_ptr.append(
                "_op_" + item.split(':')[0] + " = _op_" +
                item.split(':')[0] +
                "_old = (Digital_State_t *) cm_event_get_ptr(" +
                str(cm_count_ptr) + ",0);")
            cm_count_ptr = cm_count_ptr + 1

        systime_info = '''
    /*Taking system time info for log */
    time_t systime;
    systime = time(NULL);
    printf(ctime(&systime));
    printf("Client-Initialising GHDL...\\n\\n");
    fprintf(log_client,"Setup Client Server Connection at %s \\n"''' \
            ''',ctime(&systime));
        '''

        # ELSE branch of INIT: re-fetch current/old state pointer pairs.
        els_evt_ptr = []
        els_evt_count1 = 0
        els_evt_count2 = 0
        for item in self.output_port:
            els_evt_ptr.append("_op_" + item.split(":")[0] +
                               " = (Digital_State_t *) cm_event_get_ptr(" +
                               str(els_evt_count1) + "," +
                               str(els_evt_count2) + ");")
            els_evt_count2 = els_evt_count2 + 1
            els_evt_ptr.append("_op_" + item.split(":")[0] + "_old" +
                               " = (Digital_State_t *) cm_event_get_ptr(" +
                               str(els_evt_count1) + "," +
                               str(els_evt_count2) + ");")
            els_evt_count1 = els_evt_count1 + 1

        # Allocate a loopback IP per instance, recorded in a shared file so
        # that concurrent instances get distinct addresses.
        client_setup_ip = '''
    /* Client Setup IP Addr */
    FILE *fptr;
    int ip_count = 0;
    char* my_ip = malloc(16);
    char ip_filename[100];
        '''

        if os.name == 'nt':
            client_setup_ip += '''
    sprintf(ip_filename, "''' + \
                os.getenv('LOCALAPPDATA').replace('\\', '/') + \
                '''/Temp/NGHDL_COMMON_IP_%d.txt", getpid());
            '''
        else:
            client_setup_ip += '''
    sprintf(ip_filename, "/tmp/NGHDL_COMMON_IP_%d.txt",''' \
                ''' getpid());
            '''

        client_setup_ip += '''
    fptr = fopen(ip_filename, "r");
    if (fptr)
    {
        char line_ip[20];
        int line_port;
        while(fscanf(fptr, "%s %d", line_ip, &line_port) == 2)
        {
            ip_count++;
        }
        fclose(fptr);
    }
    if (ip_count < 254)
    {
        sprintf(my_ip, "127.0.0.%d", ip_count+1);
    }
    else
    {
        sprintf(my_ip, "127.0.%d.1", (ip_count+3)%256);
    }
    fptr = fopen(ip_filename, "a");
    if (fptr)
    {
        fprintf(fptr, "%s %d\\n", my_ip, sock_port);
        fclose(fptr);
    }
    else
    {
        perror("Client - cannot open Common_IP file ");
        exit(1);
    }
    STATIC_VAR(my_ip) = my_ip;
        '''

        client_fetch_ip = '''
    /* Client Fetch IP Addr */
        '''

        if os.name == 'nt':
            client_fetch_ip += '''
    WSADATA WSAData;
    SOCKET socket_fd;
    WSAStartup(MAKEWORD(2, 2), &WSAData);
            '''

        client_fetch_ip += '''
    char* my_ip = STATIC_VAR(my_ip);
    host = gethostbyname(my_ip);
    fprintf(log_client,"Creating client socket \\n");
        '''

        create_socket = '''
    //Creating socket for client
    if ((socket_fd = socket(AF_INET, SOCK_STREAM, 0)) == -1)
    {
        perror("Client - Error while creating client Socket ");
        fprintf(log_client,"Error while creating client socket \\n");
        exit(1);
    }
    printf("Client-Socket (Id : %d) created\\n", socket_fd);
    fprintf(log_client,"Client-Client Socket created ''' \
            '''successfully \\n");
    fprintf(log_client,"Client- Socket Id : %d \\n",socket_fd);
    // memset(&server_addr, 0, sizeof(server_addr));
    server_addr.sin_family = AF_INET;
    server_addr.sin_port = htons(sock_port);
    server_addr.sin_addr = *((struct in_addr *)host->h_addr);
    bzero(&(server_addr.sin_zero),8);
        '''

        connect_server = '''
    fprintf(log_client,"Client-Connecting to server \\n");
    //Connecting to server
    int try_limit=10;
    while(try_limit>0)
    {
        if (connect(socket_fd, (struct sockaddr*)&server_addr,''' \
            '''sizeof(struct sockaddr)) == -1)
        {
            sleep(1);
            try_limit--;
            if(try_limit==0)
            {
                fprintf(stderr,"Connect- Error:Tried to connect server on port,''' \
            '''failed...giving up \\n");
                fprintf(log_client,"Connect- Error:Tried to connect server on ''' \
            '''port, failed...giving up \\n");
                exit(1);
            }
        }
        else
        {
            printf("Client-Connected to server \\n");
            fprintf(log_client,"Client-Connected to server \\n");
            break;
        }
    }
        '''

        # Assign bit value to every input
        assign_data_to_input = []
        for item in self.input_port:
            assign_data_to_input.append("\tfor(Ii=0;Ii<PORT_SIZE(" +
                                        item.split(':')[0] + ");Ii++)\n\
\t{\n\t\tif( INPUT_STATE(" + item.split(':')[0] + "[Ii])==ZERO )\n\
\t\t{\n\t\t\ttemp_" + item.split(':')[0] + "[Ii]='0';\n\t\t}\n\
\t\telse\n\t\t{\n\t\t\ttemp_" + item.split(':')[0] + "[Ii]='1';\n\
\t\t}\n\t}\n\ttemp_" + item.split(':')[0] + "[Ii]='\\0';\n\n")

        # Build the snprintf() that serialises all inputs as "name:bits,...".
        snprintf_stmt = []
        snprintf_count = 0
        snprintf_stmt.append(
            "\t//Sending and receiving data to-from server \n")
        snprintf_stmt.append('\tsnprintf(send_data,sizeof(send_data),"')
        for item in self.input_port:
            snprintf_count = snprintf_count + 1
            snprintf_stmt.append(item.split(':')[0] + ":%s")
            if snprintf_count == len(self.input_port):
                snprintf_stmt.append('", ')
                internal_count = 0
                for item1 in self.input_port:
                    if internal_count == len(self.input_port):
                        pass
                    else:
                        snprintf_stmt.append("temp_" + item1.split(':')[0])
                        internal_count = internal_count + 1
                    if internal_count == len(self.input_port):
                        pass
                    else:
                        snprintf_stmt.append(",")
                snprintf_stmt.append(");")
            else:
                snprintf_stmt.append(",")

        send_data = '''
    if ( send(socket_fd,send_data,sizeof(send_data),0)==-1)
    {
        fprintf(stderr, "Client-Failure Sending Message \\n");
        '''

        if os.name == 'nt':
            send_data += '''
        closesocket(socket_fd);
            '''
        else:
            send_data += '''
        close(socket_fd);
            '''

        send_data += '''
        exit(1);
    }
    else
    {
        printf("Client-Message sent: %s \\n",send_data);
        fprintf(log_client,"Socket Id : %d & Message sent : %s \\n"''' \
            ''',socket_fd,send_data);
    }
        '''

        recv_data = '''
    bytes_recieved=recv(socket_fd,recv_data,sizeof(recv_data),0);
    if ( bytes_recieved <= 0 )
    {
        perror("Client-Either Connection Closed or Error ");
        exit(1);
    }
    recv_data[bytes_recieved] = '\\0';
    printf("Client-Message Received - %s\\n\\n",recv_data);
    fprintf(log_client,"Message Received From Server-''' \
            '''%s\\n",recv_data);
        '''

        # Scheduling output event
        sch_output_event = []
        for item in self.output_port:
            sch_output_event.append(
                "\t/* Scheduling event and processing them */\n\
\tif((key_iter=strstr(recv_data, " + '"' + item.split(':')[0] + ':"'
                ")) != NULL)\n\
\t{\n\
\t\twhile(*key_iter++ != ':');\n\
\t\tfor(Ii=0;*key_iter != ';';Ii++,key_iter++)\n\
\t\t{\n\
\t\t\tfprintf(log_client,\"Client-Bit val is %c \\n\",*key_iter);\n\
\t\t\tif(*key_iter=='0')\n\t\t\t{\n\
\t\t\t\t_op_" + item.split(':')[0] + "[Ii]=ZERO;\n\t\t\t}\n\
\t\t\telse if(*key_iter=='1')\n\t\t\t{\n\
\t\t\t\t_op_" + item.split(':')[0] + "[Ii]=ONE;\n\
\t\t\t}\n\t\t\telse\n\t\t\t{\n\
\t\t\t\tfprintf(log_client,\"Unknown value return from server \\n\");\
\n\t\t\t\tprintf(\"Client-Unknown value return \\n\");\n\t\t\t}\n\n\
\t\t\tif(ANALYSIS == DC)\n\t\t\t{\n\
\t\t\t\tOUTPUT_STATE(" + item.split(':')[0] + "[Ii]) = _op_" +
                item.split(':')[0] + "[Ii];\n\
\t\t\t}\n\t\t\telse if(_op_" + item.split(':')[0] + "[Ii] != _op_" +
                item.split(':')[0] + "_old[Ii])\n\
\t\t\t{\n\t\t\t\tOUTPUT_STATE(" + item.split(':')[0] + "[Ii]) = _op_" +
                item.split(':')[0] + "[Ii];\n\
\t\t\t\tOUTPUT_DELAY(" + item.split(':')[0] + "[Ii]) = ((_op_" +
                item.split(':')[0] +
                "[Ii] == ZERO) ? PARAM(fall_delay) : PARAM(rise_delay));\n\
\t\t\t}\n\t\t\telse\n\t\t\t{\n\
\t\t\t\tOUTPUT_CHANGED(" + item.split(':')[0] +
                "[Ii]) = FALSE;\n\t\t\t}\n\
\t\t\tOUTPUT_STRENGTH(" + item.split(':')[0] + "[Ii]) = STRONG;\n\
\t\t}\n\
\t}\n")

        # Writing content in cfunc.mod file
        cfunc.write(comment)
        cfunc.write(header)
        cfunc.write("\n")
        cfunc.write(function_open)
        cfunc.write("\n")

        # Adding digital state Variable
        for item in digital_state_output:
            cfunc.write("\t" + item + "\n")

        # Adding variable declaration section
        cfunc.write(var_section)
        for item in temp_input_var:
            cfunc.write("\t" + item + "\n")
        cfunc.write("\n")

        # Adding INIT portion
        cfunc.write(init_start_function)
        for item in cm_event_alloc:
            cfunc.write(2 * "\t" + item)
            cfunc.write("\n")
        cfunc.write("\n")
        cfunc.write(2 * "\t" + "/* set the load for input ports. */")
        cfunc.write("\n")
        cfunc.write(2 * "\t" + "int Ii;")
        cfunc.write("\n")
        for item in load_in_port:
            cfunc.write(2 * "\t" + item)
            cfunc.write("\n")
        cfunc.write("\n")
        cfunc.write(2 * "\t" + "/*Retrieve Storage for output*/")
        cfunc.write("\n")
        for item in cm_event_get_ptr:
            cfunc.write(2 * "\t" + item)
            cfunc.write("\n")
        cfunc.write(systime_info)
        cfunc.write("\n")
        cfunc.write(client_setup_ip)
        cfunc.write("\n")
        cfunc.write("\t\tchar command[1024];\n")

        # Launch the GHDL server; on Windows via a mintty terminal window.
        if os.name == 'nt':
            self.digital_home = self.parser.get('NGSPICE', 'DIGITAL_MODEL')
            self.msys_home = self.parser.get('COMPILER', 'MSYS_HOME')
            cmd_str2 = "/start_server.sh %d %s & read" + "\\" + "\"" + "\""
            cmd_str1 = os.path.normpath("\"" + self.digital_home + "/" +
                                        self.fname.split('.')[0] +
                                        "/DUTghdl/")
            cmd_str1 = cmd_str1.replace("\\", "/")
            cfunc.write('\t\tsnprintf(command,1024, "start mintty.exe -t ' +
                        '\\"VHDL-Testbench Logs\\" -h always bash.exe -c ' +
                        '\\' + cmd_str1 + cmd_str2 + ', sock_port, my_ip);')
        else:
            cfunc.write(
                '\t\tsnprintf(command,1024,"' + self.home +
                '/ngspice-nghdl/src/xspice/icm/ghdl/' +
                self.fname.split('.')[0] +
                '/DUTghdl/start_server.sh %d %s &", sock_port, my_ip);')
        cfunc.write('\n\t\tsystem(command);')
        cfunc.write("\n\t}")
        cfunc.write("\n")
        cfunc.write("\telse\n\t{\n")
        for item in els_evt_ptr:
            cfunc.write(2 * "\t" + item)
            cfunc.write("\n")
        cfunc.write("\t}")
        cfunc.write("\n\n")
        cfunc.write(client_fetch_ip)
        cfunc.write(create_socket)
        cfunc.write(connect_server)
        cfunc.write("\t//Formating data for sending it to client\n")
        cfunc.write("\tint Ii;\n\n")
        for item in assign_data_to_input:
            cfunc.write(item)
        for item in snprintf_stmt:
            cfunc.write(item)
        cfunc.write(send_data)
        cfunc.write(recv_data)
        for item in sch_output_event:
            cfunc.write(item)

        # Close socket fd
        if os.name == 'nt':
            cfunc.write("\tclosesocket(socket_fd);\n\n")
        else:
            cfunc.write("\tclose(socket_fd);\n\n")

        # close log_client file
        cfunc.write("\tfclose(log_client);")

        # Close cm_ function
        cfunc.write("\n}")
        cfunc.close()

    def createIfSpecFile(self):
        """Emit ifspec.ifs: the XSPICE interface specification describing
        the model's ports, parameters and static variables."""
        # ################### Creating ifspec.ifs file #################### #
        print("Starting with ifspec.ifs file")
        ifspec = open('ifspec.ifs', 'w')

        print("Gathering Al the content for ifspec file")

        ifspec_comment = '''
/*
SUMMARY: This file is auto generated and it contains the interface
specification for the code model.
*/\n
'''

        name_table = 'NAME_TABLE:\n\
C_Function_Name: cm_' + self.fname.split('.')[0] + '\n\
Spice_Model_Name: ' + self.fname.split('.')[0] + '\n\
Description: "Model generated from ghdl code ' + self.fname + '" \n'

        # Input and Output Port Table
        in_port_table = []
        out_port_table = []

        for item in self.input_port:
            port_table = 'PORT_TABLE:\n'
            port_name = 'Port_Name:\t' + item.split(':')[0] + '\n'
            description = ('Description:\t"input port ' +
                           item.split(':')[0] + '"\n')
            direction = 'Direction:\tin\n'
            default_type = 'Default_Type:\td\n'
            allowed_type = 'Allowed_Types:\t[d]\n'
            vector = 'Vector:\tyes\n'
            vector_bounds = ('Vector_Bounds:\t[' + item.split(':')[1] + ' ' +
                             item.split(":")[1] + ']\n')
            null_allowed = 'Null_Allowed:\tno\n'

            # Insert detail in the list
            in_port_table.append(port_table + port_name + description +
                                 direction + default_type + allowed_type +
                                 vector + vector_bounds + null_allowed)

        for item in self.output_port:
            port_table = 'PORT_TABLE:\n'
            port_name = 'Port_Name:\t' + item.split(':')[0] + '\n'
            description = ('Description:\t"output port ' +
                           item.split(':')[0] + '"\n')
            direction = 'Direction:\tout\n'
            default_type = 'Default_Type:\td\n'
            allowed_type = 'Allowed_Types:\t[d]\n'
            vector = 'Vector:\tyes\n'
            vector_bounds = ('Vector_Bounds:\t[' + item.split(':')[1] + ' ' +
                             item.split(":")[1] + ']\n')
            null_allowed = 'Null_Allowed:\tno\n'

            # Insert detail in the list
            # NOTE(review): output entries are appended to in_port_table,
            # leaving out_port_table empty.  The generated file is still
            # correct because both lists are written back-to-back below,
            # but the naming is misleading — looks like a latent bug.
            in_port_table.append(port_table + port_name + description +
                                 direction + default_type + allowed_type +
                                 vector + vector_bounds + null_allowed)

        parameter_table = '''
PARAMETER_TABLE:
Parameter_Name:     instance_id                 input_load
Description:        "instance_id"               "input load value (F)"
Data_Type:          real                        real
Default_Value:      0                           1.0e-12
Limits:             -                           -
Vector:             no                          no
Vector_Bounds:      -                           -
Null_Allowed:       yes                         yes

PARAMETER_TABLE:
Parameter_Name:     rise_delay                  fall_delay
Description:        "rise delay"                "fall delay"
Data_Type:          real                        real
Default_Value:      1.0e-9                      1.0e-9
Limits:             [1e-12 -]                   [1e-12 -]
Vector:             no                          no
Vector_Bounds:      -                           -
Null_Allowed:       yes                         yes
'''

        static_table = '''
STATIC_VAR_TABLE:
Static_Var_Name:    my_ip
Data_Type:          pointer
Description:        "connect to ghdlserver through this ip"
'''

        # Writing all the content in ifspec file
        ifspec.write(ifspec_comment)
        ifspec.write(name_table + "\n\n")
        for item in in_port_table:
            ifspec.write(item + "\n")
        ifspec.write("\n")
        for item in out_port_table:
            ifspec.write(item + "\n")
        ifspec.write("\n")
        ifspec.write(parameter_table)
        ifspec.write("\n")
        ifspec.write(static_table)
        ifspec.close()

    def createTestbench(self):
        """Emit <entity>_tb.vhdl: a testbench that drives the DUT from the
        VHPI socket bridge (Vhpi_Listen/Vhpi_Send) on a 5 us clock."""
        # #################### Creating testbench file ##################### #
        print("Starting with testbench file")
        testbench = open(self.fname.split('.')[0] + '_tb.vhdl', 'w')
        print(self.fname.split('.')[0] + '_tb.vhdl')

        # comment
        comment_vhdl = "------------------------------------------------------"
        comment_vhdl += "--------------------------\n"
        comment_vhdl += "--This testbench has been created by "
        comment_vhdl += "Ambikeshwar Srivastava, Rahul Paknikar \n"
        comment_vhdl += "--------------------------- FOSSEE, IIT Bombay ------"
        comment_vhdl += "---------------------------\n"
        comment_vhdl += "-----------------------------------------------------"
        comment_vhdl += "---------------------------\n"

        # Adding header, entity and architecture statement
        tb_header = '''
library ieee;
use ieee.std_logic_1164.all;
use ieee.numeric_std.all;
library work;
use work.Vhpi_Foreign.all;
use work.Utility_Package.all;
use work.sock_pkg.all;
'''
        tb_entity = ("entity " + self.fname.split('.')[0] +
                     "_tb is\nend entity;\n\n")
        arch = ("architecture " + self.fname.split('.')[0] + "_tb_beh of " +
                self.fname.split('.')[0] + "_tb is\n")

        # Adding components
        components = []
        components.append("\tcomponent " + self.fname.split('.')[0] +
                          " is\n\t\tport(\n\t\t\t\t")
        port_vector_count = 0
        for item in self.input_port:
            if self.port_vector_info[port_vector_count]:
                components.append(
                    item.split(':')[0] + ": in std_logic_vector(" +
                    str(int(item.split(':')[1]) - int(1)) +
                    " downto 0);\n\t\t\t\t")
            else:
                components.append(
                    item.split(':')[0] + ": in std_logic;\n\t\t\t\t")
            port_vector_count += 1

            # if item.split(":")[1] != '1':
            #     components.append(
            #         item.split(':')[0] + ": in std_logic_vector(" +
            #         str(int(item.split(':')[1])-int(1)) + " downto 0);" +
            #         "\n\t\t\t\t"
            #     )
            # else:
            #     components.append(
            #         item.split(':')[0] + ": in std_logic_vector(" +
            #         str(int(item.split(':')[1])-int(1)) + " downto 0);" +
            #         "\n\t\t\t\t"
            #     )

        # Last output port closes the port list without a trailing ';'.
        for item in self.output_port[:-1]:
            if self.port_vector_info[port_vector_count]:
                components.append(
                    item.split(':')[0] + ": out std_logic_vector(" +
                    str(int(item.split(':')[1]) - int(1)) +
                    " downto 0);\n\t\t\t\t")
            else:
                components.append(
                    item.split(':')[0] + ": out std_logic;\n\t\t\t\t")
            port_vector_count += 1

        if self.port_vector_info[port_vector_count]:
            components.append(
                self.output_port[-1].split(':')[0] +
                ": out std_logic_vector(" +
                str(int(self.output_port[-1].split(':')[1]) - int(1)) +
                " downto 0)\n\t\t\t\t")
        else:
            components.append(self.output_port[-1].split(':')[0] +
                              ": out std_logic\n\t\t\t\t")

        # if item.split(":")[1] != '1':
        #     components.append(item.split(':')[0]+":
        #     out std_logic_vector("
        #     +str(int(item.split(':')[1])-int(1))+" downto 0)\n\t\t\t\t")
        # else:
        #     components.append(item.split(':')[0]+":
        #     out std_logic_vector("
        #     +str(int(item.split(':')[1])-int(1))+" downto 0)\n\t\t\t\t")

        components.append(");\n")
        components.append("\tend component;\n\n")

        # Adding signals
        signals = []
        signals.append("\tsignal clk_s : std_logic := '0';\n")
        port_vector_count = 0
        for item in self.input_port:
            if self.port_vector_info[port_vector_count]:
                signals.append("\tsignal " + item.split(':')[0] +
                               ": std_logic_vector(" +
                               str(int(item.split(':')[1]) - int(1)) +
                               " downto 0);\n")
            else:
                signals.append("\tsignal " + item.split(':')[0] +
                               ": std_logic;\n")
            port_vector_count += 1

            # if item.split(":")[1] != '1':
            #     signals.append("\tsignal "+item.split(':')[0]+":
            #     std_logic_vector("+str(int(item.split(':')[1])-
            #     int(1))+" downto 0);\n")
            # else:
            #     signals.append("\tsignal "+item.split(':')[0]+":
            #     std_logic_vector("+str(int(item.split(':')[1])-
            #     int(1))+" downto 0);\n")

        for item in self.output_port:
            if self.port_vector_info[port_vector_count]:
                signals.append("\tsignal " + item.split(':')[0] +
                               ": std_logic_vector(" +
                               str(int(item.split(':')[1]) - int(1)) +
                               " downto 0);\n")
            else:
                signals.append("\tsignal " + item.split(':')[0] +
                               ": std_logic;\n")
            port_vector_count += 1

            # if item.split(":")[1] != '1':
            #     signals.append(
            #         "\tsignal " + item.split(':')[0] + ":std_logic_vector(" +
            #         str(int(item.split(':')[1]) - int(1)) + " downto 0);\n"
            #     )
            # else:
            #     signals.append(
            #         "\tsignal " + item.split(':')[0] + ":std_logic_vector(" +
            #         str(int(item.split(':')[1]) - int(1)) + " downto 0);\n"
            #     )

        # Adding mapping part
        # NOTE(review): local name ``map`` shadows the builtin within this
        # method.
        map = []
        map.append("\tu1 : " + self.fname.split('.')[0] + " port map(\n")
        for item in self.input_port:
            map.append("\t\t\t\t" + item.split(':')[0] + " => " +
                       item.split(':')[0] + ",\n")
        for item in self.output_port:
            if self.output_port.index(item) == len(self.output_port) - 1:
                map.append("\t\t\t\t" + item.split(':')[0] + " => " +
                           item.split(':')[0] + "\n")
            else:
                map.append("\t\t\t\t" + item.split(':')[0] + " => " +
                           item.split(':')[0] + ",\n")
        map.append("\t\t\t);")

        # Testbench Clock
        tb_clk = "clk_s <= not clk_s after 5 us;\n\n"

        # Adding Process block for Vhpi
        process_Vhpi = []
        process_Vhpi.append(
            "process\n\t\tvariable sock_port : integer;" +
            "\n\t\ttype string_ptr is access string;" +
            "\n\t\tvariable sock_ip : string_ptr;" +
            "\n\t\tbegin\n\t\tsock_port := sock_port_fun;" +
            "\n\t\tsock_ip := new string'(sock_ip_fun);" +
            "\n\t\tVhpi_Initialize(sock_port," +
            "Pack_String_To_Vhpi_String(sock_ip.all));" +
            "\n\t\twait until clk_s = '1';" +
            "\n\t\twhile true loop\n\t\t\twait until clk_s = '0';" +
            "\n\t\t\tVhpi_Listen;\n\t\t\twait for 1 us;\n\t\t\t" +
            "Vhpi_Send;" +
            "\n\t\tend loop;\n\t\twait;\n\tend process;\n\n")

        # Adding process block
        process = []
        process.append("\tprocess\n")
        process.append("\t\tvariable count : integer:=0;\n")
        for item in self.input_port:
            process.append("\t\tvariable " + item.split(':')[0] +
                           "_v : VhpiString;\n")
        for item in self.output_port:
            process.append("\t\tvariable " + item.split(':')[0] +
                           "_v : VhpiString;\n")
        process.append("\t\tvariable obj_ref : VhpiString;\n")
        process.append("\tbegin\n")
        process.append("\t\twhile true loop\n")
        process.append("\t\t\twait until clk_s = '0';\n\n")

        port_vector_count = 0
        # Fetch each input port value from the VHPI bridge into its signal.
        for item in self.input_port:
            process.append('\t\t\tobj_ref := Pack_String_To_Vhpi_String("' +
                           item.split(':')[0] + '");\n')
            process.append('\t\t\tVhpi_Get_Port_Value(obj_ref,' +
                           item.split(':')[0] + '_v,' +
                           item.split(':')[1] + ');\n')
            if self.port_vector_info[port_vector_count]:
                process.append('\t\t\t' + item.split(':')[0] +
                               ' <= Unpack_String(' + item.split(':')[0] +
                               '_v,' + item.split(':')[1] + ');\n')
            else:
                process.append('\t\t\t' + item.split(':')[0] +
                               ' <= To_Std_Logic(' + item.split(':')[0] +
                               '_v' + ');\n')
            port_vector_count += 1
        process.append("\n")
        process.append('\t\t\twait for 1 us;\n')

        # Push each output signal back to the VHPI bridge.
        for item in self.output_port:
            if self.port_vector_info[port_vector_count]:
                process.append('\t\t\t' + item.split(':')[0] +
                               '_v := Pack_String_To_Vhpi_String' +
                               '(Convert_SLV_To_String(' +
                               item.split(':')[0] + '));\n')
            else:
                process.append('\t\t\t' + item.split(':')[0] +
                               '_v := Pack_String_To_Vhpi_String(To_String(' +
                               item.split(':')[0] + '));\n')
            port_vector_count += 1
            process.append('\t\t\tobj_ref := Pack_String_To_Vhpi_String("' +
                           item.split(':')[0] + '");\n')
            process.append('\t\t\tVhpi_Set_Port_Value(obj_ref,' +
                           item.split(':')[0] + '_v,' +
                           item.split(':')[1] + ');\n')
            process.append("\n")

        process.append('\t\t\treport "Iteration - "' +
                       "& integer'image(count) severity note;\n")
        process.append('\t\t\tcount := count + 1;\n')
        process.append("\t\tend loop;\n")
        process.append("\tend process;\n\n")
        process.append("end architecture;")

        # Writing all the components to testbench file
        testbench.write(comment_vhdl)
        testbench.write(tb_header)
        testbench.write(tb_entity)
        testbench.write(arch)
        for item in components:
            testbench.write(item)
        for item in signals:
            testbench.write(item)
        testbench.write("\n\n")
        testbench.write("begin\n\n")
        for item in map:
            testbench.write(item)
        testbench.write("\n\t" + tb_clk)
        for item in process_Vhpi:
            testbench.write(item)
        for item in process:
            testbench.write(item)
        testbench.close()

    def createServerScript(self):
        """Emit start_server.sh: analyses/elaborates the VHDL with GHDL and
        runs the generated testbench server until ngspice ends it."""
        # ####### Creating and writing components in start_server.sh ####### #
        self.digital_home = self.parser.get('NGSPICE', 'DIGITAL_MODEL')

        start_server = open('start_server.sh', 'w')
        start_server.write("#!/bin/bash\n\n")
        start_server.write(
            "###This server run ghdl testebench for infinite time till " +
            "ngspice send END signal to stop it\n\n")
        if os.name == 'nt':
            pathstr = self.digital_home + "/" + \
                self.fname.split('.')[0] + "/DUTghdl/"
            pathstr = pathstr.replace("\\", "/")
            start_server.write("cd " + pathstr + "\n")
        else:
            start_server.write("cd " + self.digital_home + "/" +
                               self.fname.split('.')[0] + "/DUTghdl/\n")
        start_server.write("chmod 775 sock_pkg_create.sh &&\n")
        start_server.write("./sock_pkg_create.sh $1 $2 &&\n")
        start_server.write("ghdl -i *.vhdl &&\n")
        start_server.write("ghdl -a *.vhdl &&\n")
        start_server.write("ghdl -a " + self.fname + " &&\n")
        start_server.write("ghdl -a " + self.fname.split('.')[0] +
                           "_tb.vhdl &&\n")
        # Windows links against winsock (libws2_32) for the VHPI socket code.
        if os.name == 'nt':
            start_server.write("ghdl -e -Wl,ghdlserver.o " +
                               "-Wl,libws2_32.a " + self.fname.split('.')[0] +
                               "_tb &&\n")
            start_server.write("./" + self.fname.split('.')[0] + "_tb.exe")
        else:
            start_server.write("ghdl -e -Wl,ghdlserver.o " +
                               self.fname.split('.')[0] + "_tb &&\n")
            start_server.write("./" + self.fname.split('.')[0] + "_tb")
        start_server.close()

    def createSockScript(self):
        """Emit sock_pkg_create.sh: a script that writes sock_pkg.vhdl with
        the socket port ($1) and IP ($2) baked in as VHDL functions."""
        # ########### Creating and writing in sock_pkg_create.sh ########### #
        sock_pkg_create = open('sock_pkg_create.sh', 'w')
        sock_pkg_create.write("#!/bin/bash\n\n")
        sock_pkg_create.write(
            "##This file creates sock_pkg.vhdl file and sets the port " +
            "and ip from parameters passed to it\n\n")
        sock_pkg_create.write("echo \"library ieee;\n")
        sock_pkg_create.write("package sock_pkg is\n")
        sock_pkg_create.write("\tfunction sock_port_fun return integer;\n")
        sock_pkg_create.write("\tfunction sock_ip_fun return string;\n")
        sock_pkg_create.write("end;\n\n")
        sock_pkg_create.write("package body sock_pkg is\n")
        sock_pkg_create.write("\tfunction sock_port_fun return integer is\n")
        sock_pkg_create.write("\t\tvariable sock_port : integer;\n")
        sock_pkg_create.write("\t\t\tbegin\n")
        sock_pkg_create.write("\t\t\t\tsock_port := $1;\n")
        sock_pkg_create.write("\t\t\t\treturn sock_port;\n")
        sock_pkg_create.write("\t\t\tend function;\n\n")
        sock_pkg_create.write("\tfunction sock_ip_fun return string is\n")
        sock_pkg_create.write("\t\ttype string_ptr is access string;\n")
        sock_pkg_create.write("\t\tvariable sock_ip : string_ptr;\n")
        sock_pkg_create.write("\t\t\tbegin\n")
        sock_pkg_create.write('\t\t\t\tsock_ip := new string\'(\\"$2\\");\n')
        sock_pkg_create.write("\t\t\t\treturn sock_ip.all;\n")
        sock_pkg_create.write("\t\t\tend function;\n\n")
        sock_pkg_create.write("\t\tend package body;\" > sock_pkg.vhdl")
def parse_config(filename): cp = SafeConfigParser(allow_no_value=True) cp.read(filename) return cp._sections
def loginAmazon(self):
    """Log in to Amazon via Selenium and snapshot ``self.urlName``.

    Reads the base URL and credentials from the AMAZON section of
    userconfig.ini, drives a Chrome session through the login form, then
    fetches ``self.urlName`` and writes its BeautifulSoup-parsed source to
    ``self.requestsFileName`` (also kept on ``self.soupObject``).

    Side effects: opens (and closes) a Chrome WebDriver, writes a file.
    """
    # Initialising the parser.  Fix: optionxform must be assigned BEFORE
    # read(); the original set it afterwards, when the option names had
    # already been lower-cased, so the setting had no effect.
    userParser = SafeConfigParser()
    userParser.optionxform = str
    userParser.read('userconfig.ini')
    parsingDictionary = {"service": "AMAZON"}
    # Required variables for filling in config file
    baseurl = userParser.get(parsingDictionary['service'], 'url')
    username = userParser.get(parsingDictionary['service'], 'username')
    password = userParser.get(parsingDictionary['service'], 'password')

    # Same ordering fix for the second parser.
    parser = SafeConfigParser()
    parser.optionxform = str
    parser.read('config.ini')

    xpaths = {'usernameBox': "//*[@id='ap_email']",
              'passwordBox': "//*[@id='ap_password']",
              'submitButton': "//*[@id='signInSubmit']"
              }
    amazonDriver = webdriver.Chrome()
    amazonDriver.userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:36.0) Gecko/20100101 Firefox/36.0 WebKit'
    amazonDriver.cookiesEnabled = True
    amazonDriver.javascriptEnabled = True
    amazonDriver.get(baseurl)
    # Clearing Username TextBox
    amazonDriver.find_element_by_xpath(xpaths['usernameBox']).clear()
    # Typing in the username as obtained from config file
    amazonDriver.find_element_by_xpath(
        xpaths['usernameBox']).send_keys(username)
    # Clearing password field
    amazonDriver.find_element_by_xpath(xpaths['passwordBox']).clear()
    # Typing in the password
    amazonDriver.find_element_by_xpath(
        xpaths['passwordBox']).send_keys(password)
    # Clicking on Submit button
    amazonDriver.find_element_by_xpath(xpaths['submitButton']).click()
    # temp = input()
    amazonDriver.get(self.urlName)
    pageSource = amazonDriver.page_source
    self.soupObject = BeautifulSoup(pageSource, "lxml",
                                    from_encoding="utf8")
    # print(pageSource)
    # Context manager guarantees the snapshot file is closed on error.
    with open(self.requestsFileName, "w") as fh:
        fh.write(str(self.soupObject))
    amazonDriver.close()
def __init__(self, file):
    """Parse *file* (a VHDL source) and write ``connection_info.txt``.

    Reads tool locations from ``~/.nghdl/config.ini``, scans the lines
    between the entity's ``port`` and ``end`` keywords, and records one
    ``<name> <direction> <width>`` line per port (vector widths are
    derived from the ``downto`` bound).
    """
    # Script starts from here
    print("Arguement is : ", file)
    self.fname = os.path.basename(file)
    print("VHDL filename is : ", self.fname)
    self.home = os.path.expanduser("~")
    self.parser = SafeConfigParser()
    self.parser.read(
        os.path.join(self.home, os.path.join('.nghdl', 'config.ini')))
    # Tool paths used elsewhere by this class.
    self.ngspice_home = self.parser.get('NGSPICE', 'NGSPICE_HOME')
    self.release_dir = self.parser.get('NGSPICE', 'RELEASE')
    self.src_home = self.parser.get('SRC', 'SRC_HOME')
    self.licensefile = self.parser.get('SRC', 'LICENSE')

    # #### Creating connection_info.txt file from vhdl file #### #
    read_vhdl = open(file, 'r')
    vhdl_data = read_vhdl.readlines()
    read_vhdl.close()

    start_flag = -1  # Used for scaning part of data: -1 idle, 1 in port list, 0 done
    scan_data = []
    # p=re.search('port(.*?)end',read_vhdl,re.M|re.I|re.DOTALL).group()
    for item in vhdl_data:
        # Toggle the scanner on at the 'port' line, off at 'end'.
        if re.search('port', item, re.I):
            start_flag = 1
        elif re.search("end", item, re.I):
            start_flag = 0
        if start_flag == 1:
            # Strip the 'port' keyword, parentheses and semicolons so each
            # remaining line looks like "name : dir std_logic[_vector] ...".
            item = re.sub("port", " ", item, flags=re.I)
            item = re.sub("\(", " ", item, flags=re.I)  # noqa
            item = re.sub("\)", " ", item, flags=re.I)  # noqa
            item = re.sub(";", " ", item, flags=re.I)
            scan_data.append(item.rstrip())
            scan_data = [_f for _f in scan_data if _f]  # drop empty lines
        elif start_flag == 0:
            break

    port_info = []
    self.port_vector_info = []
    for item in scan_data:
        print("Scan Data :", item)
        # NOTE(review): direction detection is a bare substring search, so
        # an output port whose *name* contains "in" would be classified as
        # an input — confirm upstream naming avoids this.
        if re.search("in", item, flags=re.I):
            if re.search("std_logic_vector", item, flags=re.I):
                temp = re.compile(r"\s*std_logic_vector\s*", flags=re.I)
            elif re.search("std_logic", item, flags=re.I):
                temp = re.compile(r"\s*std_logic\s*", flags=re.I)
            else:
                raise ValueError("Please check your vhdl " +
                                 "code for datatype of input port")
        elif re.search("out", item, flags=re.I):
            if re.search("std_logic_vector", item, flags=re.I):
                temp = re.compile(r"\s*std_logic_vector\s*", flags=re.I)
            elif re.search("std_logic", item, flags=re.I):
                temp = re.compile(r"\s*std_logic\s*", flags=re.I)
            else:
                raise ValueError("Please check your vhdl " +
                                 "code for datatype of output port")
        else:
            raise ValueError(
                "Please check the in/out direction of your port")

        # lhs keeps "name : direction"; rhs is what follows the datatype
        # (e.g. "3 downto 0" for a vector).
        lhs = temp.split(item)[0]
        rhs = temp.split(item)[1]
        bit_info = re.compile(r"\s*downto\s*", flags=re.I).split(rhs)[0]
        if bit_info:
            # Vector: width = upper bound + 1.
            port_info.append(lhs + ":" + str(int(bit_info) + int(1)))
            self.port_vector_info.append(1)
        else:
            # Scalar std_logic: width 1.
            port_info.append(lhs + ":" + str(int(1)))
            self.port_vector_info.append(0)

    print("Port Info :", port_info)

    # Open connection_info.txt file
    con_ifo = open('connection_info.txt', 'w')
    for item in port_info:
        # item is "name : direction:width" — lhs itself contains a colon,
        # so split(':') yields [name, direction, width].
        word = item.split(':')
        con_ifo.write(word[0].strip() + ' ' + word[1].strip() + ' ' +
                      word[2].strip())
        con_ifo.write("\n")
    con_ifo.close()
def getcustomerID(self):
    """Read the Amazon customer id from ``config.ini`` (cwd-relative)
    into ``self.parametersDict['customerID']``."""
    config = SafeConfigParser()
    config.read('config.ini')
    customer_id = config.get('AMAZON', 'customerid')
    self.parametersDict['customerID'] = customer_id
import logging
import subprocess
import os
import string
import datetime
from pprint import pprint

import psp.Pv as pv

from .pmgrobj import pmgrobj

logger = logging.getLogger(__name__)

# Globals
maxLenName = 42  # display-width cap for names
# SafeConfigParser is expected to be imported earlier in this module.
parser = SafeConfigParser()
# The config file lives next to this module, not in the cwd.
parser.read(os.path.dirname(os.path.abspath(__file__)) + "/pmgrUtils.cfg")
# To add hutches or objTypes to the supported list, look in pmgrUtils.cfg
supportedHutches = parser.get("pmgr", "supportedHutches")
supportedObjTypes = parser.get("pmgr", "supportedObjTypes")
nTries = 3  # Number of attempts when using caget


def getCfgVals(pmgr, PV, rename=True):
    """ Returns a dictionary of the live cfg fields associated with a PV """
    # PV names are case-insensitive upstream; normalise to upper case.
    PV = PV.upper()
    configFields = listCfgFields(pmgr)
    # NOTE(review): `rename` is not used in the portion visible here, and
    # no return statement is visible — the function body presumably
    # continues below this chunk; confirm.
    cfgDict = getFieldDict(pmgr, PV, configFields)
import pymysql
import flask
from flask import Flask
import sqlalchemy
from sqlalchemy import *
from flask_sqlalchemy import *
from flask_cors import CORS
from flask_sqlalchemy import SQLAlchemy
import json
import os
from configparser import SafeConfigParser

# Let pymysql stand in for the MySQLdb driver expected by the
# 'mysql://' SQLAlchemy URI scheme.
pymysql.install_as_MySQLdb()

# read config file for secrets
parser = SafeConfigParser()
parser.read('config.ini')


# wrapper function for parsing config file
def my_parser(section, option):
    """Return the configured value as a plain str.

    NOTE(review): encode('ascii', 'ignore') silently drops any
    non-ASCII characters from the value before decoding.
    """
    return str(
        parser.get(section, option).encode('ascii', 'ignore').decode('utf-8'))


# get DB creds
user = my_parser('database', 'user')
pwd = my_parser('database', 'pwd')
host = my_parser('database', 'host')
db = my_parser('database', 'db')

# SQLAlchemy connection URI assembled from the parsed credentials.
uri = 'mysql://%s:%s@%s/%s' % (user, pwd, host, db)
def getToken(self):
    """Load the Amazon API token from ``config.ini`` (cwd-relative)
    into ``self.parametersDict['token']``."""
    config = SafeConfigParser()
    config.read('config.ini')
    token = config.get('AMAZON', 'token')
    self.parametersDict['token'] = token
def main():
    """Entry point: parse CLI options, authenticate to GitHub
    (explicit password, config-file token, or interactive prompt),
    then back up the selected account/organization, its gists and its
    repositories."""
    global IS_AUTHORIZED

    logging.basicConfig(level=logging.INFO)

    parser = init_parser()
    args = parser.parse_args()
    if args.quiet:
        LOGGER.setLevel(logging.WARN)
    elif args.debug:
        LOGGER.setLevel(logging.DEBUG)
        github.enable_console_debug_logging()

    # Process args
    if args.quiet:
        args.git.append("--quiet")
    if args.include_everything:
        args.account = True
        args.include_starred = True
        args.include_watched = True
        args.include_followers = True
        args.include_following = True
        args.include_issues = True
        args.include_issue_comments = True
        args.include_issue_events = True
        args.include_pulls = True
        args.include_pull_comments = True
        args.include_pull_commits = True
        args.include_keys = True
        args.include_releases = True
        args.include_assets = True
        args.include_wiki = True
    # Any of these implies the account-level backup as well.
    if args.include_starred or args.include_watched or args.include_followers \
            or args.include_following or args.include_keys:
        args.account = True

    args.backupdir = args.backupdir.rstrip("/")

    # Make the connection to Github here.
    # NOTE(review): args.password is three-state — presumably False when
    # the option is absent and None when given without a value (argparse
    # nargs='?'); the == comparisons distinguish those states. Confirm
    # against init_parser(), which is not visible here.
    config = {}
    if args.password == False:
        # no password option given, continue unauthenticated
        # unauthenticated users can only use http git method
        args.type = 'http'
    elif args.password == None:
        # password option given, but no password value given
        config = {'login_or_token': args.login_or_token}
        if os.path.isfile(CONFFILE):
            cfg = ConfigParser()
            cfg.read(CONFFILE)
            # Prefer an API token; fall back to a stored password.
            try:
                config['password'] = cfg.get('github-backup', 'APITOKEN')
            except:
                config['password'] = cfg.get('github-backup', 'PASSWORD')
        else:
            password = getpass.getpass('Enter password for {}: '.format(
                config['login_or_token']))
            if password:
                config['password'] = password
    else:
        config = {'login_or_token': args.login_or_token}
        config['password'] = args.password

    LOGGER.debug("Github config: %r", config)
    gh = github.Github(**config)

    # Check that backup dir exists
    if not os.path.exists(args.backupdir):
        mkdir_p(args.backupdir)

    if args.organization:
        if args.password:
            # NOTE(review): uses args.org here but args.organization in the
            # condition above — confirm both attributes exist on the parser.
            account = gh.get_organization(args.org)
        else:
            account = gh.get_organization(args.login_or_token)
    else:
        if args.username:
            account = gh.get_user(args.username)
        elif config.get('password', None):
            # Authenticated: no argument means "the logged-in user".
            account = gh.get_user()
        else:
            account = gh.get_user(args.login_or_token)

    IS_AUTHORIZED = isinstance(account,
                               github.AuthenticatedUser.AuthenticatedUser)
    # Sanity check: we are authorized iff a password/token was supplied.
    assert not (bool(config.get('password', None)) ^ IS_AUTHORIZED), account

    if args.include_keys and not IS_AUTHORIZED:
        LOGGER.info(
            "Cannot backup keys with unauthenticated account, ignoring...")
        args.include_keys = False

    filters = {}
    if IS_AUTHORIZED:
        # Get all repos
        filters = {
            'affiliation': ','.join(args.affiliation),
            'visibility': args.visibility
        }

    if args.account:
        process_account(gh, account, args)

    if args.include_gists:
        for gist in account.get_gists():
            RepositoryBackup(gist, args).backup()
    # Starred gists only exist on authenticated-user objects.
    if args.include_starred_gists and hasattr(account, 'get_starred_gists'):
        for gist in account.get_starred_gists():
            RepositoryBackup(gist, args).backup()

    if not args.skip_repos:
        repos = account.get_repos(**filters)
        for repo in repos:
            if args.skip_forks and repo.fork:
                continue
            RepositoryBackup(repo, args).backup()
from urllib import request, parse
from datetime import datetime, timedelta
import re, requests, json, time, sys, DataAccess
import logging, logging.config
from lxml import html
from configparser import SafeConfigParser

# Module-level configuration and logging, loaded once at import time.
parser = SafeConfigParser()
parser.read('simple.ini')

logging.config.fileConfig('logging.conf')
logger = logging.getLogger('simpleExample')


class Moip(object):
    # Name-mangled defaults; set per instance in __init__.
    __begin_date = None
    __end_date = None
    __access_token = None
    __refoEstabelecimento = None

    def __init__(self, *args):
        """Initialise the instance from the input arguments and kick off
        the scraping procedure.

        NOTE(review): branching is driven by len(sys.argv), not
        len(args) — with 3 CLI arguments, args must contain
        (estabelecimento, begin_date, end_date); with no arguments a
        fixed date window is used. The scrape* methods are defined
        elsewhere in this class.
        """
        if (len(sys.argv) == 3):
            self.__refoEstabelecimento = args[0]
            self.__begin_date = args[1]
            self.__end_date = args[2]
            self.scrapePorCliente()
        elif (len(sys.argv) <= 1):
            # Default window when run without CLI arguments.
            self.__begin_date = "2017-03-01T14:20:54.00Z"
            self.__end_date = "2017-03-25T13:08.00Z"
            self.scrapeTodosClientes()
# coding: utf-8
import psycopg2
from psycopg2.extras import wait_select
from configparser import SafeConfigParser

parser = SafeConfigParser()


def _make_dsn(dbname, user, host, port, password):
    """Build a libpq connection string from the individual parameters.

    The original format string hard-coded ``'******'`` for user and
    password (and had only three ``{}`` placeholders for five
    arguments), silently ignoring the credentials passed in; all five
    values are interpolated now.
    """
    return "dbname='{}' user='{}' host={} port={} password='{}'".format(
        dbname, user, host, port, password)


def open_connection(dbname, user, host, port, password):
    """Open a blocking PostgreSQL connection.

    :return: ``(connection, cursor)`` tuple.
    """
    connector = psycopg2.connect(_make_dsn(dbname, user, host, port,
                                           password))
    cursor = connector.cursor()
    return connector, cursor


def open_connection_async(dbname, user, host, port, password):
    """Open an asynchronous PostgreSQL connection and wait until it is
    established.

    :return: ``(connection, cursor)`` tuple.
    """
    # `async` became a reserved word in Python 3.7, so passing it as a
    # keyword is a SyntaxError; psycopg2 accepts `async_` as an alias
    # for the same flag.
    connector = psycopg2.connect(_make_dsn(dbname, user, host, port,
                                           password), async_=1)
    wait_select(connector)  # block until the async connect completes
    cursor = connector.cursor()
    return connector, cursor


def close_connection(connector, cursor):
    """Close the cursor, then its connection. Always returns None."""
    cursor.close()
    connector.close()
    return None
import sys
from os import rename
import time
from shutil import copy as filecopy
from copy import deepcopy
from ast import literal_eval
from configparser import SafeConfigParser

from config import config

numcells = config['battery']['numcells']

import logger

# Error-level log handler for this module.
log = logger.logging.getLogger(__name__)
log.setLevel(logger.logging.DEBUG)
log.addHandler(logger.errfile)

# The summary file is an ini file whose values are Python literals.
summaryfile = SafeConfigParser()
summaryfile.read(config['files']['summaryfile'])

#logdata =logger.logging.getlogger()
#logdata.setLevel(logger.logging.INFO)
#log.addHandler(logger.logfile)


def loadsummary():
    """Build a {section: {key: parsed value}} dict from the summary file.

    Values are parsed with ast.literal_eval, so numbers/lists/dicts
    stored as text come back as Python objects.

    NOTE(review): no return statement is visible in this view — the
    built ``summary`` dict is discarded as written; confirm whether a
    ``return summary`` follows below.
    """
    summary = {}
    for section in summaryfile.sections():
        summary[section] = {}
        for key, val in summaryfile.items(section):
            summary[section][key] = literal_eval(val)


# daysummaryfile = open('/media/75cc9171-4331-4f88-ac3f-0278d132fae9/daysummary','r')
import sys import boto3 import json import logging from botocore.exceptions import ClientError # Import utility helpers sys.path.insert(1, os.path.realpath(os.path.pardir)) import helpers # Get configuration ''' Reference: https://docs.python.org/3/library/configparser.html ''' from configparser import SafeConfigParser config = SafeConfigParser(os.environ) config.read('notify_config.ini') # Add utility code here def main(argv=None): # Connect to SQS and get the message queue sqs = boto3.resource('sqs', region_name=config['aws']['AwsRegionName']) queue = sqs.get_queue_by_name(QueueName=config['sqs']['QueueName']) # Dynamodb for accessing data dynamodb = boto3.resource('dynamodb', region_name=config['aws']['AwsRegionName']) table = dynamodb.Table(config['db']['Name']) # Poll the message queue in a loop while True:
def createThemeFromTemplate(title, description, baseOn='template'):
    """Create a new theme from the given title and description based on
    another theme resource directory.

    :param title: display title; normalised to build the new theme id
    :param description: free-text description stored in the manifest
    :param baseOn: name of the existing theme directory to clone
    :raises KeyError: if *baseOn* does not exist
    :return: the normalised name of the newly created theme
    """
    source = queryResourceDirectory(THEME_RESOURCE_NAME, baseOn)
    if source is None:
        raise KeyError("Theme {0:s} not found".format(baseOn))
    # The theme id is the URL-normalised title (bytes on py2).
    themeName = getUtility(IURLNormalizer).normalize(title)
    if six.PY2 and isinstance(themeName, six.text_type):
        themeName = themeName.encode('utf-8')
    resources = getOrCreatePersistentResourceDirectory()
    resources.makeDirectory(themeName)
    target = resources[themeName]
    # Copy every resource from the template into the new directory.
    cloneResourceDirectory(source, target)
    manifest = SafeConfigParser()
    if MANIFEST_FILENAME in target:
        if six.PY2:
            fp = target.openFile(MANIFEST_FILENAME)
            try:
                if hasattr(manifest, "read_file"):
                    # backports.configparser
                    manifest.read_file(fp)
                else:
                    manifest.readfp(fp)
            finally:
                fp.close()
        else:
            # configparser can only read/write text
            # but in py3 plone.resource objects are BytesIO objects.
            fp = target.openFile(MANIFEST_FILENAME)
            try:
                data = fp.read()
            finally:
                fp.close()
            manifest.read_string(safe_unicode(data))
    if not manifest.has_section('theme'):
        manifest.add_section('theme')
    if six.PY2 and isinstance(title, six.text_type):
        title = title.encode('utf-8')
    if six.PY2 and isinstance(description, six.text_type):
        description = description.encode('utf-8')
    manifest.set('theme', 'title', title)
    manifest.set('theme', 'description', description)
    # Rewrite manifest entries that point at the template so they point
    # at the new theme instead.
    if manifest.has_option('theme', 'prefix'):
        prefix = u"/++%s++%s" % (THEME_RESOURCE_NAME, themeName)
        manifest.set('theme', 'prefix', prefix)
    if manifest.has_option('theme', 'rules'):
        rule = manifest.get('theme', 'rules')
        rule_file_name = rule.split('/')[-1]  # extract real rules file name
        rules = u"/++%s++%s/%s" % (THEME_RESOURCE_NAME, themeName,
                                   rule_file_name)
        manifest.set('theme', 'rules', rules)
    paths_to_fix = [
        'development-css', 'production-css', 'tinymce-content-css',
        'development-js', 'production-js'
    ]
    for var_path in paths_to_fix:
        if not manifest.has_option('theme', var_path):
            continue
        val = manifest.get('theme', var_path)
        if not val:
            continue
        template_prefix = '++%s++%s/' % (THEME_RESOURCE_NAME, baseOn)
        if template_prefix in val:
            # okay, fix
            val = val.replace(template_prefix,
                              '++%s++%s/' % (THEME_RESOURCE_NAME, themeName))
            manifest.set('theme', var_path, val)
    # plone.resource uses OFS.File which is a BytesIO objects
    # but configparser can only deal with text (StringIO).
    # So we need to do this stupid dance to write manifest.cfg
    tempfile = six.StringIO()
    manifest.write(tempfile)
    tempfile.seek(0)
    data = tempfile.read()
    tempfile.close()
    manifestContents = six.BytesIO(safe_encode(data))
    target.writeFile(MANIFEST_FILENAME, manifestContents)
    return themeName
def __init__(self, **kwargs):
    """Forward *kwargs* to both parser base classes.

    NOTE(review): presumably ``ConfigParserOldStyle`` sits outside the
    cooperative MRO (old-style/non-super-calling base), so a single
    ``super().__init__`` would skip it — hence the explicit call;
    confirm against the class definitions.
    """
    # Must call this __init__ manually :(
    ConfigParserOldStyle.__init__(self, **kwargs)
    super(ConfigParser, self).__init__(**kwargs)
class MakefileParser(object):
    """Parse OTB CMake files to recover the application test commands.

    Reads source/build locations from ``otbcfg.ini``, resolves CMake
    SET/STRING/FIND_PATH variables into a substitution dictionary, and
    walks Testing/Applications to collect ``(command, validation)``
    pairs per declared OTB_TEST_APPLICATION.
    """

    def __init__(self):
        self.maxDiff = None
        self.parser = SafeConfigParser()
        self.parser.read('otbcfg.ini')
        if not os.path.exists('otbcfg.ini'):
            raise Exception(
                "OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified in the file otbcfg.ini"
            )
        self.root_dir = self.parser.get('otb', 'checkout_dir')
        if not os.path.exists(self.root_dir):
            raise Exception(
                "Check otbcfg.ini : OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified there"
            )
        self.build_dir = self.parser.get('otb', 'build_dir')
        if not os.path.exists(self.build_dir):
            raise Exception(
                "Check otbcfg.ini : OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified there"
            )
        self.logger = get_OTB_log()

    def test_CMakelists(self):
        """Parse the top-level CMakeLists.txt and return the initial
        variable dictionary (paths, SET/STRING definitions)."""
        provided = {}
        provided["OTB_SOURCE_DIR"] = self.root_dir
        provided["OTB_BINARY_DIR"] = self.build_dir
        provided["OTB_DATA_LARGEINPUT_ROOT"] = os.path.normpath(
            os.path.join(self.root_dir, "../OTB-Data/Input"))
        try:
            with open(os.path.join(self.root_dir,
                                   "CMakeLists.txt")) as file_input:
                content = file_input.read()
            output = parse(content)
            # FIND_PATH commands: variable -> [target file, hints...]
            defined_paths = [
                each for each in output
                if 'Command' in str(type(each)) and "FIND_PATH" in each.name
            ]
            the_paths = {
                key.body[0].contents:
                [thing.contents for thing in key.body[1:]]
                for key in defined_paths
            }
            # SET commands: variable -> joined value string
            the_sets = [
                each for each in output
                if 'Command' in str(type(each)) and "SET" in each.name.upper()
            ]
            the_sets = {
                key.body[0].contents:
                [thing.contents for thing in key.body[1:]]
                for key in the_sets
            }
            the_sets = {key: " ".join(the_sets[key]) for key in the_sets}
            # Variables later passed through STRING are lower-cased.
            the_strings = set([
                each.body[-1].contents for each in output
                if 'Command' in str(type(each)) and
                "STRING" in each.name.upper()
            ])

            def mini_clean(item):
                # Strip surrounding quotes from single-token values.
                if item.startswith('"') and item.endswith(
                        '"') and " " not in item:
                    return item[1:-1]
                return item

            the_sets = {key: mini_clean(the_sets[key]) for key in the_sets}

            def templatize(item):
                # Values containing $ still need substitution later.
                if "$" in item:
                    return Template(item)
                return item

            for key in the_sets:
                if key in the_strings:
                    the_sets[key] = the_sets[key].lower()
            the_sets = {key: templatize(the_sets[key]) for key in the_sets}

            for path in the_paths:
                target_file = the_paths[path][1]
                suggested_paths = []
                if len(the_paths[path]) > 2:
                    suggested_paths = the_paths[path][2:]
                try:
                    provided[path] = find_file(target_file)
                except Exception as e:
                    # Fall back to the first hint that exists on disk.
                    for each in suggested_paths:
                        st = Template(each)
                        pac = os.path.abspath(st.safe_substitute(provided))
                        if os.path.exists(pac):
                            provided[path] = pac
                            break
            resolve_dict(provided, the_sets)
            provided.update(the_sets)
            return provided
        except Exception as e:
            traceback.print_exc()
            self.fail(str(e))

    def add_make(self, previous_context, new_file):
        """Merge SET/STRING definitions from *new_file* into
        *previous_context* (mutated and returned)."""
        with open(new_file) as f:
            input = f.read()
        output = parse(input)
        apps = [each for each in output if 'Command' in str(type(each))]
        setcommands = [each for each in apps if 'SET' in each.name.upper()]
        stringcommands = [
            each for each in apps if 'STRING' in each.name.upper()
        ]
        environment = previous_context

        def mini_clean(item):
            if item.startswith('"') and item.endswith('"') and " " not in item:
                return item[1:-1]
            return item

        new_env = {}
        for command in setcommands:
            key = command.body[0].contents
            ct = " ".join([item.contents for item in command.body[1:]])
            ct = mini_clean(ct)
            if "$" in ct:
                values = Template(ct)
            else:
                values = ct
            new_env[key] = values
        for stringcommand in stringcommands:
            # STRING(TOLOWER <src> <dst>): dst is last, src second-to-last.
            key = stringcommand.body[-1].contents
            ct = stringcommand.body[-2].contents
            ct = mini_clean(ct.lower())
            if "$" in ct:
                values = LowerTemplate(ct)
            else:
                values = ct
            new_env[key] = values
        resolve_dict(environment, new_env)
        environment.update(new_env)
        return environment

    def get_apps(self, the_makefile, the_dict):
        """Return the OTB_TEST_APPLICATION commands of *the_makefile*."""
        with open(the_makefile) as f:
            input = f.read()
        output = parse(input)
        apps = [each for each in output if 'Command' in str(type(each))]
        otb_apps = [
            each for each in apps
            if 'OTB_TEST_APPLICATION' in each.name.upper()
        ]
        return otb_apps

    def get_tests(self, the_makefile, the_dict):
        """Return the ADD_TEST commands of *the_makefile*."""
        with open(the_makefile) as f:
            input = f.read()
        output = parse(input)
        apps = [each for each in output if 'Command' in str(type(each))]
        otb_tests = [each for each in apps if 'ADD_TEST' in each.name.upper()]
        return otb_tests

    def get_apps_with_context(self, the_makefile, the_dict):
        """Return (app command, enclosing FOREACH context) pairs."""
        with open(the_makefile) as f:
            input = f.read()
        output = parse(input)

        def is_a_command(item):
            return 'Command' in str(type(item))

        appz = []
        context = []
        for each in output:
            if is_a_command(each):
                # Track the FOREACH nesting so every app remembers which
                # loop variables were in scope where it was declared.
                if 'FOREACH' in each.name and 'ENDFOREACH' not in each.name:
                    args = [item.contents for item in each.body]
                    context.append(args)
                elif 'ENDFOREACH' in each.name:
                    context.pop()
                elif 'OTB_TEST_APPLICATION' in each.name.upper():
                    appz.append((each, context[:]))
        return appz

    def get_name_line(self, the_list, the_dict):
        """Resolve and return the NAME field of an app declaration."""
        items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
        itemz = [[], [], [], [], []]
        last_index = 0
        for each in the_list:
            if each.contents in items:
                last_index = items.index(each.contents)
            else:
                itemz[last_index].append(each.contents)
        result = itemz[0][0]
        the_string = Template(result).safe_substitute(the_dict)
        if '$' in the_string:
            # Keep substituting until no $ remains; unknown keys become "".
            neo_dict = the_dict
            the_string = Template(the_string).safe_substitute(neo_dict)
            while '$' in the_string:
                try:
                    the_string = Template(the_string).substitute(neo_dict)
                except KeyError as e:
                    self.logger.warning("Key %s is not found in makefiles" %
                                        str(e))
                    neo_dict[str(e)] = ""
        if 'string.Template' in the_string:
            raise Exception("Unexpected toString call in %s" % the_string)
        return the_string

    def get_command_line(self, the_list, the_dict):
        """Build the resolved otbcli command line for an app."""
        items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
        itemz = [[], [], [], [], []]
        last_index = 0
        for each in the_list:
            if each.contents in items:
                last_index = items.index(each.contents)
            else:
                itemz[last_index].append(each.contents)
        result = []
        result.extend(["otbcli_%s" % each for each in itemz[1]])
        # "otbcli_" alone is 7 characters -> the APP field was empty.
        if len(result[0]) == 7:
            raise Exception("App name is empty!")
        result.extend(itemz[2])
        result.append("-testenv")
        result.extend(itemz[3])
        the_string = Template(" ".join(result)).safe_substitute(the_dict)
        if '$' in the_string:
            neo_dict = the_dict
            the_string = Template(" ".join(result)).safe_substitute(neo_dict)
            while '$' in the_string:
                try:
                    the_string = Template(the_string).substitute(neo_dict)
                except KeyError as e:
                    self.logger.warning("Key %s is not found in makefiles" %
                                        str(e))
                    neo_dict[str(e)] = ""
        if 'string.Template' in the_string:
            raise Exception("Unexpected toString call in %s" % the_string)
        return the_string

    def get_test(self, the_list, the_dict):
        """Build the resolved otbTestDriver validation line ("" if the
        app has no VALID section)."""
        items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
        itemz = [[], [], [], [], []]
        last_index = 0
        for each in the_list:
            if each.contents in items:
                last_index = items.index(each.contents)
            else:
                itemz[last_index].append(each.contents)
        result = ["otbTestDriver"]
        result.extend(itemz[4])
        if len(result) == 1:
            return ""
        the_string = Template(" ".join(result)).safe_substitute(the_dict)
        if '$' in the_string:
            neo_dict = the_dict
            the_string = Template(" ".join(result)).safe_substitute(neo_dict)
            while '$' in the_string:
                try:
                    the_string = Template(the_string).substitute(neo_dict)
                except KeyError as e:
                    self.logger.warning("Key %s is not found in makefiles" %
                                        str(e))
                    neo_dict[str(e)] = ""
        if 'string.Template' in the_string:
            raise Exception("Unexpected toString call in %s" % the_string)
        return the_string

    def test_algos(self):
        """Collect {test name: (command line, validation line)} for every
        fully-resolvable OTB_TEST_APPLICATION under Testing/Applications."""
        tests = {}
        algos_dir = os.path.join(self.root_dir, "Testing/Applications")
        makefiles = find_files("CMakeLists.txt", algos_dir)
        to_be_excluded = os.path.join(self.root_dir,
                                      "Testing/Applications/CMakeLists.txt")
        if to_be_excluded in makefiles:
            makefiles.remove(to_be_excluded)
        # For each leaf makefile, list the CMakeLists.txt of every
        # directory between the root and the leaf (context providers).
        resolve_algos = {}
        for makefile in makefiles:
            intermediate_makefiles = []
            path = makefile.split(os.sep)[len(self.root_dir.split(os.sep)):-1]
            for ind in range(len(path)):
                tmp_path = path[:ind + 1]
                tmp_path.append("CMakeLists.txt")
                tmp_path = os.sep.join(tmp_path)
                candidate_makefile = os.path.join(self.root_dir, tmp_path)
                if os.path.exists(candidate_makefile):
                    intermediate_makefiles.append(candidate_makefile)
            resolve_algos[makefile] = intermediate_makefiles
        dict_for_algo = {}
        for makefile in makefiles:
            basic = self.test_CMakelists()
            last_context = self.add_make(
                basic,
                os.path.join(self.root_dir,
                             "Testing/Utilities/CMakeLists.txt"))
            for intermediate_makefile in resolve_algos[makefile]:
                last_context = self.add_make(last_context,
                                             intermediate_makefile)
            dict_for_algo[makefile] = last_context
        for makefile in makefiles:
            appz = self.get_apps_with_context(makefile,
                                              dict_for_algo[makefile])
            for app, context in appz:
                if len(context) == 0:
                    # App declared outside any FOREACH: resolve once.
                    import copy
                    ddi = copy.deepcopy(dict_for_algo[makefile])
                    tk_dict = autoresolve(ddi)
                    tk_dict = autoresolve(tk_dict)
                    name_line = self.get_name_line(app.body, tk_dict)
                    command_line = self.get_command_line(app.body, tk_dict)
                    test_line = self.get_test(app.body, tk_dict)
                    if '$' in test_line or '$' in command_line:
                        if '$' in command_line:
                            self.logger.error(command_line)
                        if '$' in test_line:
                            self.logger.warning(test_line)
                    else:
                        tests[name_line] = (command_line, test_line)
                else:
                    # App declared inside FOREACH loops: expand the cross
                    # product of every loop variable's values.
                    contexts = {}
                    for iteration in context:
                        key = iteration[0]
                        values = [
                            each[1:-1].lower() for each in iteration[1:]
                        ]
                        contexts[key] = values
                    keyorder = list(contexts.keys())
                    import itertools
                    pool = [
                        each for each in itertools.product(
                            *list(contexts.values()))
                    ]
                    import copy
                    for poolinstance in pool:
                        neo_dict = copy.deepcopy(dict_for_algo[makefile])
                        zipped = list(zip(keyorder, poolinstance))
                        for each in zipped:
                            neo_dict[each[0]] = each[1]
                        ak_dict = autoresolve(neo_dict)
                        ak_dict = autoresolve(ak_dict)
                        ak_dict = autoresolve(ak_dict)
                        ddi = ak_dict
                        name_line = self.get_name_line(app.body, ddi)
                        command_line = self.get_command_line(app.body, ddi)
                        test_line = self.get_test(app.body, ddi)
                        # Fix: the original guard read
                        # `'$' in command_line or '$' not in test_line`,
                        # which dropped every fully-resolved test; it must
                        # mirror the non-FOREACH branch above.
                        if '$' in command_line or '$' in test_line:
                            if '$' in command_line:
                                self.logger.error(command_line)
                            if '$' in test_line:
                                self.logger.warning(test_line)
                        else:
                            tests[name_line] = (command_line, test_line)
        return tests
import time

from configparser import SafeConfigParser
from ohmysportsfeedspy import MySportsFeeds

# Init conf.ini parse
parse = SafeConfigParser()
parse.read('conf.ini')

# Grab consts from conf.ini
api_ver = parse.get('main', 'api_ver')
api_username = parse.get('main', 'api_username')
api_password = parse.get('main', 'api_password')
interval = parse.get('main', 'interval')  # poll period in seconds (string)

# Init MSF Connection (authenticates once at import time)
msf = MySportsFeeds(version=api_ver)
msf.authenticate(api_username, api_password)


def get_latest_data(sport, msf):
    """Fetch the latest data for *sport* from MySportsFeeds.

    TODO: restrict to all games in the next 14 days for the given sport.
    """
    rtdata = msf.msf_get_data(league=sport)
    return rtdata


def main():
    if __name__ == '__main__':
        # Main service loop: poll each league, then sleep.
        while True:
            get_latest_data('nfl', msf)
            get_latest_data('nba', msf)
            get_latest_data('mlb', msf)
            # `interval` is read from the config file as a string;
            # time.sleep() needs a number. (The original passed the raw
            # string — TypeError — and never imported `time` at all.)
            time.sleep(float(interval))


main()
class OVirtInventory(object):
    """Build an Ansible dynamic inventory from oVirt datacenters
    described in an ini-style configuration file (one section per
    datacenter)."""

    # datacenter group variables extracted from configuration
    _DC_OPT_VARS = [(ANSIBLE_SSH_USER, str), (ANSIBLE_SSH_PK, str),
                    (ANSIBLE_SSH_PORT, int), (ANSIBLE_SSH_PASS, str)]
    # supported roles and their tag keys
    _ROLES = {'control': OVIRT_TAG_CONTROL, 'worker': OVIRT_TAG_WORKER}

    def __init__(self, config_file):
        # Default IP pattern: dotted quads only. The original default,
        # '^(\d+).(\d+).(\d+).(\d+)$', left the dots unescaped (each '.'
        # matched ANY character) and was not a raw string; the separators
        # are now escaped so only real IPv4 addresses match.
        self._config = ConfigParser(
            defaults={
                OVIRT_IP_REGEX: r'^(\d+)\.(\d+)\.(\d+)\.(\d+)$',
                OVIRT_NIC_NAME: None,
                OVIRT_TAG_CONTROL: 'mi-control',
                OVIRT_TAG_WORKER: 'mi-worker',
            })
        self._config.read(config_file)

    @property
    def config(self):
        """
        :return: configuration used by this instance
        :rtype: ConfigParser.SafeConfigParser
        """
        return self._config

    @property
    def datacenters(self):
        """List of configured datacenter section names."""
        return self.config.sections()

    def api(self, dc):
        """
        Create an oVirt API instance for a requested datacenter

        :param dc: datacenter
        :type dc: str
        :rtype: ovirtsdk.api.API
        """
        kwargs = {
            'password': self.config.get(dc, OVIRT_PASSWORD),
            'url': self.config.get(dc, OVIRT_URL),
            'username': self.config.get(dc, OVIRT_USERNAME),
        }
        if self.config.has_option(dc, OVIRT_CA):
            kwargs['ca_file'] = self.config.get(dc, OVIRT_CA)
        if self.config.has_option(dc, OVIRT_API_INSECURE):
            # NOTE(review): bool() of any non-empty string is True, so
            # `insecure = false` in the config still enables insecure
            # mode — confirm intended semantics before tightening.
            kwargs['insecure'] = bool(self.config.get(dc, OVIRT_API_INSECURE))
        return API(**kwargs)

    def ip(self, dc, vm):
        """
        Fetch the IP address of a VM in a datacenter based on
        configuration

        :param dc: datacenter the VM belongs to
        :type dc: str
        :param vm: the vm instance
        :type vm: ovirtsdk.infrastructure.brokers.VM
        :return: IP address string based on any NIC and REGEX conditions
            configured for the datacenter
        """
        nic_name = self.config.get(dc, OVIRT_NIC_NAME)
        ip_regex = self.config.get(dc, OVIRT_IP_REGEX)
        pattern = regex_compile('' if ip_regex is None else ip_regex)
        if nic_name is not None:
            # NOTE(review): the configured NIC name is prefixed with
            # 'nic_' here — confirm that matches the oVirt naming scheme.
            nics = [vm.get_nics().get(name='nic_{0:s}'.format(nic_name))]
        else:
            nics = vm.get_nics().list()
        ips = []
        for nic in nics:
            for device in nic.get_reported_devices().get_reported_device():
                ips.extend(device.get_ips().get_ip())
        # Return the first reported address matching the pattern.
        for ip in ips:
            if pattern.match(ip.get_address()) is None:
                continue
            return ip.get_address()
        return None

    def hosts(self, dc, tag=None):
        """
        Retrieve hosts in a datacenter filtered by tags if one is
        provided.

        :param dc: datacenter
        :type dc: str
        :param tag: oVirt tag name
        :type tag: str
        :return: a list of host IP addresses
        :rtype: list of str
        """
        hosts = []
        ovirt_dc = dc
        if self.config.has_option(dc, OVIRT_DC):
            ovirt_dc = self.config.get(dc, OVIRT_DC)
        # filter by datacenter
        conditions = ['datacenter={0:s}'.format(ovirt_dc)]
        if tag is not None:
            # filter by tag
            conditions.append('tag={0:s}'.format(tag))
        query = ' and '.join(conditions)
        for vm in self.api(dc).vms.list(query=query):
            if vm.status.state == 'up':
                # handle only if a VM is up
                ip = self.ip(dc, vm)
                if ip is not None:
                    # append only if an IP can be extracted from config
                    hosts.append(ip)
        return hosts

    def inventory(self):
        """
        :return: dynamic inventory dictionary object for the
            configuration used in this instance
        :rtype: dict
        """
        data = {
            '_meta': {
                'hostvars': {}
            },
        }
        for dc in self.datacenters:
            dc_group = 'dc={0:s}'.format(dc)
            # configure vars common to all hosts in a datacenter
            data[dc_group] = {
                'hosts': [],
                'vars': {
                    cfg[0]: cfg[1](self.config.get(dc, cfg[0]))
                    for cfg in self._DC_OPT_VARS
                    if self.config.has_option(dc, cfg[0])
                }
            }
            data[dc_group]['vars']['dc'] = dc
            for role in self._ROLES.keys():
                # populate groups for each role
                group = 'role={0:s}'.format(role)
                tag = self.config.get(dc, self._ROLES.get(role))
                if group not in data:
                    data[group] = {'hosts': []}
                consul_dc = dc
                if self.config.has_option(dc, CONSUL_DC):
                    consul_dc = self.config.get(dc, CONSUL_DC)
                # hostvars for all hosts in this role/datacenter are the
                # same; the original built this dict and then ignored it,
                # re-spelling the keys in the _meta literal below.
                hostvars = {
                    'role': role,
                    'dc': dc,
                    'consul_dc': consul_dc,
                }
                hosts = self.hosts(dc, tag)
                # add hosts to both groups (role, datacenter)
                data[group]['hosts'].extend(hosts)
                data[dc_group]['hosts'].extend(hosts)
                for host in hosts:
                    # populate _meta: shared vars plus per-host extras
                    data['_meta']['hostvars'][host] = dict(
                        hostvars, private_ipv4=host, provider='ovirt')
        return data
class Buildozer(object):
    '''Driver for the whole packaging process: parses buildozer.spec,
    prepares the build layout, runs external commands and dispatches
    command-line commands to the selected target backend
    (one of buildozer.targets, e.g. "android").
    '''

    # commands every target backend is expected to support
    standard_cmds = ('distclean', 'update', 'debug', 'release',
                     'deploy', 'run', 'serve')

    def __init__(self, filename='buildozer.spec', target=None):
        '''Load the spec file (if it exists) and optionally activate a target.

        :param filename: path of the spec file to parse
        :param target: optional target name to activate immediately
        '''
        super(Buildozer, self).__init__()
        self.log_level = 1
        self.environ = {}
        self.specfilename = filename
        self.state = None
        self.build_id = None
        self.config_profile = ''
        self.config = SafeConfigParser(allow_no_value=True)
        # keep option names case-sensitive (default optionxform lowercases)
        self.config.optionxform = lambda value: value
        # monkey-patch convenience getters onto the parser object
        self.config.getlist = self._get_config_list
        self.config.getlistvalues = self._get_config_list_values
        self.config.getdefault = self._get_config_default
        self.config.getbooldefault = self._get_config_bool
        self.config.getrawdefault = self._get_config_raw_default

        if exists(filename):
            try:
                self.config.read(filename, "utf-8")
            except TypeError:  # python 2 has no second arg here
                self.config.read(filename)
            self.check_configuration_tokens()

            # Check all section/tokens for env vars, and replace the
            # config value if a suitable env var exists.
            set_config_from_envs(self.config)

            try:
                self.log_level = int(
                    self.config.getdefault('buildozer', 'log_level', '1'))
            except:
                # keep the default log level on any malformed value
                pass

            build_dir = self.config.getdefault('buildozer', 'builddir', None)
            if build_dir:
                # for backwards compatibility, append .buildozer to builddir
                build_dir = join(build_dir, '.buildozer')
            self.build_dir = self.config.getdefault(
                'buildozer', 'build_dir', build_dir)
            if self.build_dir:
                self.build_dir = realpath(join(self.root_dir, self.build_dir))

            self.user_bin_dir = self.config.getdefault(
                'buildozer', 'bin_dir', None)
            if self.user_bin_dir:
                self.user_bin_dir = realpath(
                    join(self.root_dir, self.user_bin_dir))

        self.targetname = None
        self.target = None
        if target:
            self.set_target(target)

    def set_target(self, target):
        '''Set the target to use (one of buildozer.targets, such as "android")
        '''
        self.targetname = target
        # import buildozer.targets.<target> and ask it for its Target object
        m = __import__('buildozer.targets.{0}'.format(target),
                       fromlist=['buildozer'])
        self.target = m.get_target(self)
        self.check_build_layout()
        self.check_configuration_tokens()

    def prepare_for_build(self):
        '''Prepare the build.
        '''
        assert (self.target is not None)
        # guard flag: preparation runs only once per target instance
        if hasattr(self.target, '_build_prepared'):
            return

        self.info('Preparing build')

        self.info('Check requirements for {0}'.format(self.targetname))
        self.target.check_requirements()

        self.info('Install platform')
        self.target.install_platform()

        self.info('Check application requirements')
        self.check_application_requirements()

        self.info('Check garden requirements')
        self.check_garden_requirements()

        self.info('Compile platform')
        self.target.compile_platform()

        # flag to prevent multiple build
        self.target._build_prepared = True

    def build(self):
        '''Do the build.

        The target can set build_mode to 'release' or 'debug' before calling
        this method.

        (:meth:`prepare_for_build` must have been call before.)
        '''
        assert (self.target is not None)
        assert (hasattr(self.target, '_build_prepared'))

        if hasattr(self.target, '_build_done'):
            return

        # increment the build number
        self.build_id = int(self.state.get('cache.build_id', '0')) + 1
        self.state['cache.build_id'] = str(self.build_id)

        self.info('Build the application #{}'.format(self.build_id))
        self.build_application()

        self.info('Package the application')
        self.target.build_package()

        # flag to prevent multiple build
        self.target._build_done = True

    #
    # Log functions
    #

    def log(self, level, msg):
        '''Print ``msg`` if ``level`` is within the configured log_level.'''
        if level > self.log_level:
            return
        if USE_COLOR:
            color = COLOR_SEQ(LOG_LEVELS_C[level])
            print(''.join((RESET_SEQ, color, '# ', msg, RESET_SEQ)))
        else:
            print('{} {}'.format(LOG_LEVELS_T[level], msg))

    def debug(self, msg):
        # level 2: only shown with --verbose / log_level >= 2
        self.log(2, msg)

    def info(self, msg):
        # level 1: default verbosity
        self.log(1, msg)

    def error(self, msg):
        # level 0: always shown
        self.log(0, msg)

    #
    # Internal check methods
    #

    def checkbin(self, msg, fn):
        '''Return the resolved path of executable ``fn``, searching $PATH;
        exit(1) if it cannot be found.
        '''
        self.debug('Search for {0}'.format(msg))
        if exists(fn):
            return realpath(fn)
        for dn in environ['PATH'].split(':'):
            rfn = realpath(join(dn, fn))
            if exists(rfn):
                self.debug(' -> found at {0}'.format(rfn))
                return rfn
        self.error('{} not found, please install it.'.format(msg))
        exit(1)

    def cmd(self, command, **kwargs):
        '''Run ``command`` in a subprocess, streaming its output.

        Extra keyword options (popped before Popen): show_output, get_stdout,
        get_stderr, break_on_error, sensible.  Returns a tuple
        (stdout_text_or_None, stderr_text_or_None, returncode).
        Raises BuildozerCommandException on non-zero exit when
        break_on_error is true.
        '''
        # prepare the environ, based on the system + our own env
        env = copy(environ)
        env.update(self.environ)

        # prepare the process
        kwargs.setdefault('env', env)
        kwargs.setdefault('stdout', PIPE)
        kwargs.setdefault('stderr', PIPE)
        kwargs.setdefault('close_fds', True)
        kwargs.setdefault('shell', True)
        kwargs.setdefault('show_output', self.log_level > 1)

        show_output = kwargs.pop('show_output')
        get_stdout = kwargs.pop('get_stdout', False)
        get_stderr = kwargs.pop('get_stderr', False)
        break_on_error = kwargs.pop('break_on_error', True)
        sensible = kwargs.pop('sensible', False)

        # 'sensible' commands only log their first word (hide secrets/args)
        if not sensible:
            self.debug('Run {0!r}'.format(command))
        else:
            if type(command) in (list, tuple):
                self.debug('Run {0!r} ...'.format(command[0]))
            else:
                self.debug('Run {0!r} ...'.format(command.split()[0]))
        self.debug('Cwd {}'.format(kwargs.get('cwd')))

        # open the process
        if sys.platform == 'win32':
            kwargs.pop('close_fds', None)
        process = Popen(command, **kwargs)

        # prepare fds: switch both pipes to non-blocking so select+read works
        fd_stdout = process.stdout.fileno()
        fd_stderr = process.stderr.fileno()
        if fcntl:
            fcntl.fcntl(
                fd_stdout, fcntl.F_SETFL,
                fcntl.fcntl(fd_stdout, fcntl.F_GETFL) | os.O_NONBLOCK)
            fcntl.fcntl(
                fd_stderr, fcntl.F_SETFL,
                fcntl.fcntl(fd_stderr, fcntl.F_GETFL) | os.O_NONBLOCK)

        ret_stdout = [] if get_stdout else None
        ret_stderr = [] if get_stderr else None
        while True:
            try:
                readx = select.select([fd_stdout, fd_stderr], [], [])[0]
            except select.error:
                break
            if fd_stdout in readx:
                chunk = process.stdout.read()
                if not chunk:
                    break
                if get_stdout:
                    ret_stdout.append(chunk)
                if show_output:
                    if IS_PY3:
                        # NOTE(review): the stdout chunk is written to stderr
                        # here under Python 3 -- looks like a typo, confirm
                        # against upstream before changing
                        stderr.write(chunk.decode('utf-8'))
                    else:
                        stdout.write(chunk)
            if fd_stderr in readx:
                chunk = process.stderr.read()
                if not chunk:
                    break
                if get_stderr:
                    ret_stderr.append(chunk)
                if show_output:
                    if IS_PY3:
                        stderr.write(chunk.decode('utf-8'))
                    else:
                        stderr.write(chunk)

        stdout.flush()
        stderr.flush()

        process.communicate()
        if process.returncode != 0 and break_on_error:
            self.error('Command failed: {0}'.format(command))
            self.error('')
            self.error('Buildozer failed to execute the last command')
            if self.log_level <= 1:
                self.error(
                    'If the error is not obvious, please raise the log_level to 2'
                )
                self.error('and retry the latest command.')
            else:
                self.error(
                    'The error might be hidden in the log above this error')
                self.error(
                    'Please read the full log, and search for it before')
                self.error('raising an issue with buildozer itself.')
                self.error(
                    'In case of a bug report, please add a full log with log_level = 2'
                )
            raise BuildozerCommandException()

        if ret_stdout:
            ret_stdout = b''.join(ret_stdout)
        if ret_stderr:
            ret_stderr = b''.join(ret_stderr)

        return (ret_stdout.decode('utf-8', 'ignore') if ret_stdout else None,
                ret_stderr.decode('utf-8') if ret_stderr else None,
                process.returncode)

    def cmd_expect(self, command, **kwargs):
        '''Run ``command`` under pexpect and return the spawn object, so the
        caller can interact with the process.
        '''
        from pexpect import spawnu

        # prepare the environ, based on the system + our own env
        env = copy(environ)
        env.update(self.environ)

        # prepare the process
        kwargs.setdefault('env', env)
        kwargs.setdefault('show_output', self.log_level > 1)
        sensible = kwargs.pop('sensible', False)
        show_output = kwargs.pop('show_output')

        if show_output:
            if IS_PY3:
                kwargs['logfile'] = codecs.getwriter('utf8')(stdout.buffer)
            else:
                kwargs['logfile'] = codecs.getwriter('utf8')(stdout)

        if not sensible:
            self.debug('Run (expect) {0!r}'.format(command))
        else:
            self.debug('Run (expect) {0!r} ...'.format(command.split()[0]))

        self.debug('Cwd {}'.format(kwargs.get('cwd')))
        return spawnu(command, **kwargs)

    def check_configuration_tokens(self):
        '''Ensure the spec file is 'correct'.
        '''
        self.info('Check configuration tokens')
        self.migrate_configuration_tokens()
        get = self.config.getdefault
        errors = []
        adderror = errors.append
        if not get('app', 'title', ''):
            adderror('[app] "title" is missing')
        if not get('app', 'source.dir', ''):
            adderror('[app] "source.dir" is missing')

        package_name = get('app', 'package.name', '')
        if not package_name:
            adderror('[app] "package.name" is missing')
        elif package_name[0] in map(str, range(10)):
            adderror('[app] "package.name" may not start with a number.')

        # exactly one of version / version.regex must be set
        version = get('app', 'version', '')
        version_regex = get('app', 'version.regex', '')
        if not version and not version_regex:
            adderror('[app] One of "version" or "version.regex" must be set')
        if version and version_regex:
            adderror('[app] Conflict between "version" and "version.regex"'
                     ', only one can be used.')
        if version_regex and not get('app', 'version.filename', ''):
            adderror('[app] "version.filename" is missing'
                     ', required by "version.regex"')

        orientation = get('app', 'orientation', 'landscape')
        if orientation not in ('landscape', 'portrait', 'all',
                               'sensorLandscape'):
            adderror('[app] "orientation" have an invalid value')

        if errors:
            self.error('{0} error(s) found in the buildozer.spec'.format(
                len(errors)))
            for error in errors:
                print(error)
            exit(1)

    def migrate_configuration_tokens(self):
        '''Rename deprecated [app] options to their current names in the
        in-memory config, warning the user for each rename.
        '''
        config = self.config
        if config.has_section("app"):
            migration = (("android.p4a_dir", "p4a.source_dir"),
                         ("android.p4a_whitelist", "android.whitelist"),
                         ("android.bootstrap", "p4a.bootstrap"),
                         ("android.branch", "p4a.branch"),
                         ("android.p4a_whitelist_src", "android.whitelist_src"),
                         ("android.p4a_blacklist_src", "android.blacklist_src"))
            for entry_old, entry_new in migration:
                if not config.has_option("app", entry_old):
                    continue
                value = config.get("app", entry_old)
                config.set("app", entry_new, value)
                config.remove_option("app", entry_old)
                self.error(
                    "In section [app]: {} is deprecated, rename to {}!".format(
                        entry_old, entry_new))

    def check_build_layout(self):
        '''Ensure the build (local and global) directory layout and files are
        ready.
        '''
        self.info('Ensure build layout')

        if not exists(self.specfilename):
            print('No {0} found in the current directory. Abandon.'.format(
                self.specfilename))
            exit(1)

        # create global dir
        self.mkdir(self.global_buildozer_dir)
        self.mkdir(self.global_cache_dir)

        # create local .buildozer/ dir
        self.mkdir(self.buildozer_dir)
        # create local bin/ dir
        self.mkdir(self.bin_dir)

        self.mkdir(self.applibs_dir)
        # persistent build state (build id, cached requirement lists, ...)
        self.state = JsonStore(join(self.buildozer_dir, 'state.db'))

        target = self.targetname
        if target:
            self.mkdir(join(self.global_platform_dir, target, 'platform'))
            self.mkdir(join(self.buildozer_dir, target, 'platform'))
            self.mkdir(join(self.buildozer_dir, target, 'app'))

    def check_application_requirements(self):
        '''Ensure the application requirements are all available and ready to be
        packaged as well.
        '''
        requirements = self.config.getlist('app', 'requirements', '')
        target_available_packages = self.target.get_available_packages()
        if target_available_packages is True:
            # target handles all packages!
            return

        # remove all the requirements that the target can compile
        onlyname = lambda x: x.split('==')[0]
        requirements = [
            x for x in requirements
            if onlyname(x) not in target_available_packages
        ]

        if requirements and hasattr(sys, 'real_prefix'):
            e = self.error
            e('virtualenv is needed to install pure-Python modules, but')
            e('virtualenv does not support nesting, and you are running')
            e('buildozer in one. Please run buildozer outside of a')
            e('virtualenv instead.')
            exit(1)

        # did we already installed the libs ?
        if exists(self.applibs_dir) and \
                self.state.get('cache.applibs', '') == requirements:
            self.debug('Application requirements already installed, pass')
            return

        # recreate applibs
        self.rmdir(self.applibs_dir)
        self.mkdir(self.applibs_dir)

        # ok now check the availability of all requirements
        for requirement in requirements:
            self._install_application_requirement(requirement)

        # everything goes as expected, save this state!
        self.state['cache.applibs'] = requirements

    def _install_application_requirement(self, module):
        '''pip-install one pure-Python requirement into applibs.'''
        self._ensure_virtualenv()
        self.debug('Install requirement {} in virtualenv'.format(module))
        self.cmd(
            'pip install --target={} {}'.format(self.applibs_dir, module),
            env=self.env_venv,
            cwd=self.buildozer_dir)

    def check_garden_requirements(self):
        '''Ensure required garden packages are available to be included.
        '''
        garden_requirements = self.config.getlist(
            'app', 'garden_requirements', '')

        # have we installed the garden packages?
        if exists(self.gardenlibs_dir) and \
                self.state.get('cache.gardenlibs', '') == garden_requirements:
            self.debug('Garden requirements already installed, pass')
            return

        # we're going to reinstall all the garden libs.
        self.rmdir(self.gardenlibs_dir)

        # but if we don't have requirements, or if the user removed everything,
        # don't do anything.
        if not garden_requirements:
            self.state['cache.gardenlibs'] = garden_requirements
            return

        self._ensure_virtualenv()
        self.cmd('pip install Kivy-Garden==0.1.1', env=self.env_venv)

        # recreate gardenlibs
        self.mkdir(self.gardenlibs_dir)

        for requirement in garden_requirements:
            self._install_garden_package(requirement)

        # save gardenlibs state
        self.state['cache.gardenlibs'] = garden_requirements

    def _install_garden_package(self, package):
        '''Install one garden package into the local buildozer dir.'''
        self._ensure_virtualenv()
        self.debug(
            'Install garden package {} in buildozer_dir'.format(package))
        self.cmd(
            'garden install --app {}'.format(package),
            env=self.env_venv,
            cwd=self.buildozer_dir)

    def _ensure_virtualenv(self):
        '''Create the local virtualenv on first use and capture its activated
        environment into self.env_venv (lazy; guarded by the venv attribute).
        '''
        if hasattr(self, 'venv'):
            return
        self.venv = join(self.buildozer_dir, 'venv')
        if not self.file_exists(self.venv):
            self.cmd('virtualenv --python=python2.7 ./venv',
                     cwd=self.buildozer_dir)

        # read virtualenv output and parse it
        output = self.cmd(
            'bash -c "source venv/bin/activate && env"',
            get_stdout=True,
            cwd=self.buildozer_dir)
        self.env_venv = copy(self.environ)
        for line in output[0].splitlines():
            args = line.split('=', 1)
            if len(args) != 2:
                continue
            key, value = args
            # only these two vars differ meaningfully once activated
            if key in ('VIRTUAL_ENV', 'PATH'):
                self.env_venv[key] = value
        if 'PYTHONHOME' in self.env_venv:
            del self.env_venv['PYTHONHOME']

        # ensure any sort of compilation will fail
        self.env_venv['CC'] = '/bin/false'
        self.env_venv['CXX'] = '/bin/false'

    def mkdir(self, dn):
        '''makedirs(dn), silently skipping existing directories.'''
        if exists(dn):
            return
        self.debug('Create directory {0}'.format(dn))
        makedirs(dn)

    def rmdir(self, dn):
        '''rmtree(dn), silently skipping missing directories.'''
        if not exists(dn):
            return
        self.debug('Remove directory and subdirectory {}'.format(dn))
        rmtree(dn)

    def file_matches(self, patterns):
        '''Return all paths matching any of the glob ``patterns``.'''
        from glob import glob
        result = []
        for pattern in patterns:
            matches = glob(expanduser(pattern.strip()))
            result.extend(matches)
        return result

    def file_exists(self, *args):
        # path segments are joined before the check
        return exists(join(*args))

    def file_rename(self, source, target, cwd=None):
        '''Move ``source`` to ``target`` (both relative to ``cwd`` if given).'''
        if cwd:
            source = join(cwd, source)
            target = join(cwd, target)
        self.debug('Rename {0} to {1}'.format(source, target))
        if not os.path.isdir(os.path.dirname(target)):
            # NOTE(review): only logs the problem; the move below still runs
            self.error(('Rename {0} to {1} fails because {2} is not a '
                        'directory').format(source, target, target))
        move(source, target)

    def file_copy(self, source, target, cwd=None):
        '''Copy ``source`` to ``target`` (both relative to ``cwd`` if given).'''
        if cwd:
            source = join(cwd, source)
            target = join(cwd, target)
        self.debug('Copy {0} to {1}'.format(source, target))
        copyfile(source, target)

    def file_extract(self, archive, cwd=None):
        '''Extract an archive (tgz/tbz2/zip/self-extracting .bin) in ``cwd``
        by shelling out to the matching system tool.
        '''
        if archive.endswith('.tgz') or archive.endswith('.tar.gz'):
            # XXX tarfile doesn't work for NDK-r8c :(
            #tf = tarfile.open(archive, 'r:*')
            #tf.extractall(path=cwd)
            #tf.close()
            self.cmd('tar xzf {0}'.format(archive), cwd=cwd)
            return

        if archive.endswith('.tbz2') or archive.endswith('.tar.bz2'):
            # XXX same as before
            self.cmd('tar xjf {0}'.format(archive), cwd=cwd)
            return

        if archive.endswith('.bin'):
            # To process the bin files for linux and darwin systems
            self.cmd('chmod a+x {0}'.format(archive), cwd=cwd)
            self.cmd('./{0}'.format(archive), cwd=cwd)
            return

        if archive.endswith('.zip'):
            self.cmd('unzip {}'.format(join(cwd, archive)), cwd=cwd)
            return

        raise Exception('Unhandled extraction for type {0}'.format(archive))

    def file_copytree(self, src, dest):
        '''Recursively copy ``src`` into ``dest`` (directories created as
        needed, files copied one by one).
        '''
        print('copy {} to {}'.format(src, dest))
        if os.path.isdir(src):
            if not os.path.isdir(dest):
                os.makedirs(dest)
            files = os.listdir(src)
            for f in files:
                self.file_copytree(os.path.join(src, f),
                                   os.path.join(dest, f))
        else:
            copyfile(src, dest)

    def clean_platform(self):
        '''Delete the per-target platform build directory.'''
        self.info('Clean the platform build directory')
        if not exists(self.platform_dir):
            return
        rmtree(self.platform_dir)

    def download(self, url, filename, cwd=None):
        '''Download ``url + filename`` into ``filename`` (under ``cwd`` if
        given), overwriting any existing file; returns the local path.
        '''
        def report_hook(index, blksize, size):
            # urlretrieve progress callback: print a one-line progress meter
            if size <= 0:
                progression = '{0} bytes'.format(index * blksize)
            else:
                progression = '{0:.2f}%'.format(
                    index * blksize * 100. / float(size))
            stdout.write('- Download {}\r'.format(progression))
            stdout.flush()

        url = url + filename
        if cwd:
            filename = join(cwd, filename)
        if self.file_exists(filename):
            unlink(filename)

        self.debug('Downloading {0}'.format(url))
        urlretrieve(url, filename, report_hook)
        return filename

    def get_version(self):
        '''Return the application version, either from [app] "version" or by
        applying "version.regex" to "version.filename"; raises on conflict or
        when neither form is configured.
        '''
        c = self.config
        has_version = c.has_option('app', 'version')
        has_regex = c.has_option('app', 'version.regex')
        has_filename = c.has_option('app', 'version.filename')

        # version number specified
        if has_version:
            if has_regex or has_filename:
                raise Exception(
                    'version.regex and version.filename conflict with version')
            return c.get('app', 'version')

        # search by regex
        if has_regex or has_filename:
            if has_regex and not has_filename:
                raise Exception('version.filename is missing')
            if has_filename and not has_regex:
                raise Exception('version.regex is missing')

            fn = c.get('app', 'version.filename')
            with open(fn) as fd:
                data = fd.read()
                regex = c.get('app', 'version.regex')
                match = search(regex, data)
                if not match:
                    raise Exception(
                        'Unable to find capture version in {0}\n'
                        ' (looking for `{1}`)'.format(fn, regex))
                version = match.groups()[0]
                self.debug('Captured version: {0}'.format(version))
                return version

        raise Exception('Missing version or version.regex + version.filename')

    def build_application(self):
        '''Assemble the app dir: sources, applibs, garden libs, sitecustomize.'''
        self._copy_application_sources()
        self._copy_application_libs()
        self._copy_garden_libs()
        self._add_sitecustomize()

    def _copy_application_sources(self):
        # XXX clean the inclusion/exclusion algo.
        source_dir = realpath(self.config.getdefault('app', 'source.dir', '.'))
        include_exts = self.config.getlist('app', 'source.include_exts', '')
        exclude_exts = self.config.getlist('app', 'source.exclude_exts', '')
        exclude_dirs = self.config.getlist('app', 'source.exclude_dirs', '')
        exclude_patterns = self.config.getlist(
            'app', 'source.exclude_patterns', '')
        include_patterns = self.config.getlist(
            'app', 'source.include_patterns', '')
        app_dir = self.app_dir

        self.debug('Copy application source from {}'.format(source_dir))

        rmtree(self.app_dir)

        for root, dirs, files in walk(source_dir, followlinks=True):
            # avoid hidden directory
            if True in [x.startswith('.') for x in root.split(sep)]:
                continue

            # need to have sort-of normalization. Let's say you want to exclude
            # image directory but not images, the filtered_root must have a / at
            # the end, same for the exclude_dir. And then we can safely compare
            filtered_root = root[len(source_dir) + 1:].lower()
            if filtered_root:
                filtered_root += '/'

                # manual exclude_dirs approach
                is_excluded = False
                for exclude_dir in exclude_dirs:
                    if exclude_dir[-1] != '/':
                        exclude_dir += '/'
                    if filtered_root.startswith(exclude_dir):
                        is_excluded = True
                        break

                # pattern matching
                if not is_excluded:
                    # match pattern if not ruled out by exclude_dirs
                    for pattern in exclude_patterns:
                        if fnmatch(filtered_root, pattern):
                            is_excluded = True
                            break
                # include_patterns can re-include a directory excluded above
                for pattern in include_patterns:
                    if fnmatch(filtered_root, pattern):
                        is_excluded = False
                        break

                if is_excluded:
                    continue

            for fn in files:
                # avoid hidden files
                if fn.startswith('.'):
                    continue

                # pattern matching
                is_excluded = False
                dfn = fn.lower()
                if filtered_root:
                    dfn = join(filtered_root, fn)
                for pattern in exclude_patterns:
                    if fnmatch(dfn, pattern):
                        is_excluded = True
                        break
                for pattern in include_patterns:
                    if fnmatch(dfn, pattern):
                        is_excluded = False
                        break
                if is_excluded:
                    continue

                # filter based on the extension
                # TODO more filters
                basename, ext = splitext(fn)
                if ext:
                    ext = ext[1:]
                    if include_exts and ext not in include_exts:
                        continue
                    if exclude_exts and ext in exclude_exts:
                        continue

                sfn = join(root, fn)
                rfn = realpath(join(app_dir, root[len(source_dir) + 1:], fn))

                # ensure the directory exists
                dfn = dirname(rfn)
                self.mkdir(dfn)

                # copy!
                self.debug('Copy {0}'.format(sfn))
                copyfile(sfn, rfn)

    def _copy_application_libs(self):
        # copy also the libs
        copytree(self.applibs_dir, join(self.app_dir, '_applibs'))

    def _copy_garden_libs(self):
        # garden libs are optional; only copy when they were installed
        if exists(self.gardenlibs_dir):
            copytree(self.gardenlibs_dir, join(self.app_dir, 'libs'))

    def _add_sitecustomize(self):
        '''Ship sitecustomize.py with the app and patch service/main.py so
        the bundled _applibs directory is importable.
        '''
        copyfile(join(dirname(__file__), 'sitecustomize.py'),
                 join(self.app_dir, 'sitecustomize.py'))

        main_py = join(self.app_dir, 'service', 'main.py')
        if not self.file_exists(main_py):
            #self.error('Unable to patch main_py to add applibs directory.')
            return

        header = (b'import sys, os; '
                  b'sys.path = [os.path.join(os.getcwd(),'
                  b'"..", "_applibs")] + sys.path\n')
        with open(main_py, 'rb') as fd:
            data = fd.read()
        data = header + data
        with open(main_py, 'wb') as fd:
            fd.write(data)
        self.info('Patched service/main.py to include applibs')

    def namify(self, name):
        '''Return a "valid" name from a name with lot of invalid chars
        (allowed characters: a-z, A-Z, 0-9, -, _)
        '''
        return re.sub('[^a-zA-Z0-9_\-]', '_', name)

    @property
    def root_dir(self):
        # directory that contains the spec file
        return realpath(dirname(self.specfilename))

    @property
    def buildozer_dir(self):
        # local working dir; overridable via [buildozer] build_dir
        if self.build_dir:
            return self.build_dir
        return join(self.root_dir, '.buildozer')

    @property
    def bin_dir(self):
        # output dir for packages; overridable via [buildozer] bin_dir
        if self.user_bin_dir:
            return self.user_bin_dir
        return join(self.root_dir, 'bin')

    @property
    def platform_dir(self):
        return join(self.buildozer_dir, self.targetname, 'platform')

    @property
    def app_dir(self):
        return join(self.buildozer_dir, self.targetname, 'app')

    @property
    def applibs_dir(self):
        return join(self.buildozer_dir, 'applibs')

    @property
    def gardenlibs_dir(self):
        return join(self.buildozer_dir, 'libs')

    @property
    def global_buildozer_dir(self):
        return join(expanduser('~'), '.buildozer')

    @property
    def global_platform_dir(self):
        return join(self.global_buildozer_dir, self.targetname, 'platform')

    @property
    def global_packages_dir(self):
        return join(self.global_buildozer_dir, self.targetname, 'packages')

    @property
    def global_cache_dir(self):
        return join(self.global_buildozer_dir, 'cache')

    @property
    def package_full_name(self):
        # '<domain>.<name>', or just '<name>' when no domain is set
        package_name = self.config.getdefault('app', 'package.name', '')
        package_domain = self.config.getdefault('app', 'package.domain', '')
        if package_domain == '':
            return package_name
        return '{}.{}'.format(package_domain, package_name)

    #
    # command line invocation
    #

    def targets(self):
        '''Yield (name, module) for every importable target module found in
        buildozer/targets.
        '''
        for fn in listdir(join(dirname(__file__), 'targets')):
            if fn.startswith('.') or fn.startswith('__'):
                continue
            if not fn.endswith('.py'):
                continue
            target = fn[:-3]
            try:
                m = __import__('buildozer.targets.{0}'.format(target),
                               fromlist=['buildozer'])
                yield target, m
            except NotImplementedError:
                pass
            except:
                raise
                # NOTE(review): this pass is unreachable (after raise)
                pass

    def usage(self):
        '''Print the full command-line usage, including per-target commands.'''
        print('Usage:')
        print(
            ' buildozer [--profile <name>] [--verbose] [target] <command>...'
        )
        print(' buildozer --version')
        print('')
        print('Available targets:')
        targets = list(self.targets())
        for target, m in targets:
            try:
                doc = m.__doc__.strip().splitlines()[0].strip()
            except Exception:
                doc = '<no description>'
            print(' {0:<18} {1}'.format(target, doc))

        print('')
        print('Global commands (without target):')
        cmds = [x for x in dir(self) if x.startswith('cmd_')]
        for cmd in cmds:
            name = cmd[4:]
            meth = getattr(self, cmd)
            if not meth.__doc__:
                continue
            doc = [x for x in meth.__doc__.strip().splitlines()][0].strip()
            print(' {0:<18} {1}'.format(name, doc))

        print('')
        print('Target commands:')
        print(' clean Clean the target environment')
        print(' update Update the target dependencies')
        print(' debug Build the application in debug mode')
        print(' release Build the application in release mode')
        print(' deploy Deploy the application on the device')
        print(' run Run the application on the device')
        print(' serve Serve the bin directory via SimpleHTTPServer')

        for target, m in targets:
            mt = m.get_target(self)
            commands = mt.get_custom_commands()
            if not commands:
                continue
            print('')
            print('Target "{0}" commands:'.format(target))
            for command, doc in commands:
                if not doc:
                    continue
                doc = textwrap.fill(textwrap.dedent(doc).strip(), 59,
                                    subsequent_indent=' ' * 21)
                print(' {0:<18} {1}'.format(command, doc))

        print('')

    def run_default(self):
        '''Run the command stored via "buildozer setdefault", if any.'''
        self.check_build_layout()
        if 'buildozer:defaultcommand' not in self.state:
            print('No default command set.')
            print('Use "buildozer setdefault <command args...>"')
            print('Use "buildozer help" for a list of all commands"')
            exit(1)
        cmd = self.state['buildozer:defaultcommand']
        self.run_command(cmd)

    def run_command(self, args):
        '''Parse global flags, then dispatch to an internal cmd_* method or a
        target backend.
        '''
        while args:
            if not args[0].startswith('-'):
                break
            arg = args.pop(0)

            if arg in ('-v', '--verbose'):
                self.log_level = 2

            elif arg in ('-h', '--help'):
                self.usage()
                exit(0)

            elif arg in ('-p', '--profile'):
                self.config_profile = args.pop(0)

            elif arg == '--version':
                print('Buildozer {0}'.format(__version__))
                exit(0)

        self._merge_config_profile()
        self.check_root()

        if not args:
            self.run_default()
            return

        command, args = args[0], args[1:]
        cmd = 'cmd_{0}'.format(command)

        # internal commands ?
        if hasattr(self, cmd):
            getattr(self, cmd)(*args)
            return

        # maybe it's a target?
        targets = [x[0] for x in self.targets()]
        if command not in targets:
            print('Unknown command/target {}'.format(
                self.translate_target(command, inverse=True)))
            exit(1)

        self.set_target(command)
        self.target.run_commands(args)

    def check_root(self):
        '''If effective user id is 0, display a warning and require
        user input to continue (or to cancel)'''
        if IS_PY3:
            input_func = input
        else:
            input_func = raw_input

        warn_on_root = self.config.getdefault('buildozer', 'warn_on_root', '1')
        try:
            euid = os.geteuid() == 0
        except AttributeError:
            # NOTE(review): on a non-win32 platform without geteuid, `euid`
            # stays unbound and the check below would raise -- confirm intended
            if sys.platform == 'win32':
                import ctypes
                euid = ctypes.windll.shell32.IsUserAnAdmin() != 0
        if warn_on_root == '1' and euid:
            print('\033[91m\033[1mBuildozer is running as root!\033[0m')
            print(
                '\033[91mThis is \033[1mnot\033[0m \033[91mrecommended, and may lead to problems later.\033[0m'
            )
            cont = None
            while cont not in ('y', 'n'):
                cont = input_func('Are you sure you want to continue [y/n]? ')

            if cont == 'n':
                sys.exit()

    def cmd_init(self, *args):
        '''Create a initial buildozer.spec in the current directory
        '''
        if exists('buildozer.spec'):
            print('ERROR: You already have a buildozer.spec file.')
            exit(1)
        copyfile(join(dirname(__file__), 'default.spec'), 'buildozer.spec')
        print('File buildozer.spec created, ready to customize!')

    def cmd_distclean(self, *args):
        '''Clean the whole Buildozer environment.
        '''
        print("Warning: Your ndk, sdk and all other cached packages will be"
              " removed. Continue? (y/n)")
        if sys.stdin.readline().lower()[0] == 'y':
            self.info('Clean the global build directory')
            if not exists(self.global_buildozer_dir):
                return
            rmtree(self.global_buildozer_dir)

    def cmd_help(self, *args):
        '''Show the Buildozer help.
        '''
        self.usage()

    def cmd_setdefault(self, *args):
        '''Set the default command to run when no arguments are given
        '''
        self.check_build_layout()
        self.state['buildozer:defaultcommand'] = args

    def cmd_version(self, *args):
        '''Show the Buildozer version
        '''
        print('Buildozer {0}'.format(__version__))

    def cmd_serve(self, *args):
        '''Serve the bin directory via SimpleHTTPServer
        '''
        try:
            from http.server import SimpleHTTPRequestHandler
            from socketserver import TCPServer
        except ImportError:
            # python 2 module names
            from SimpleHTTPServer import SimpleHTTPRequestHandler
            from SocketServer import TCPServer

        os.chdir(self.bin_dir)
        handler = SimpleHTTPRequestHandler
        httpd = TCPServer(("", SIMPLE_HTTP_SERVER_PORT), handler)
        print("Serving via HTTP at port {}".format(SIMPLE_HTTP_SERVER_PORT))
        print("Press Ctrl+c to quit serving.")
        httpd.serve_forever()

    #
    # Private
    #

    def _merge_config_profile(self):
        '''Merge every "[section@profile,...]" section matching the selected
        profile into its base "[section]".
        '''
        profile = self.config_profile
        if not profile:
            return
        for section in self.config.sections():

            # extract the profile part from the section name
            # example: [app@default,hd]
            parts = section.split('@', 1)
            if len(parts) < 2:
                continue

            # create a list that contain all the profiles of the current section
            # ['default', 'hd']
            section_base, section_profiles = parts
            section_profiles = section_profiles.split(',')
            if profile not in section_profiles:
                continue

            # the current profile is one available in the section
            # merge with the general section, or make it one.
            if not self.config.has_section(section_base):
                self.config.add_section(section_base)
            for name, value in self.config.items(section):
                print('merged ({}, {}) into {} (profile is {})'.format(
                    name, value, section_base, profile))
                self.config.set(section_base, name, value)

    def _get_config_list_values(self, *args, **kwargs):
        # monkey-patch method for ConfigParser: list variant keeping values
        kwargs['with_values'] = True
        return self._get_config_list(*args, **kwargs)

    def _get_config_list(self, section, token, default=None,
                         with_values=False):
        # monkey-patch method for ConfigParser
        # get a key as a list of string, separated from the comma

        # check if an env var exists that should replace the file config
        set_config_token_from_env(section, token, self.config)

        # if a section:token is defined, let's use the content as a list.
        l_section = '{}:{}'.format(section, token)
        if self.config.has_section(l_section):
            values = self.config.options(l_section)
            if with_values:
                return [
                    '{}={}'.format(key, self.config.get(l_section, key))
                    for key in values
                ]
            else:
                return [x.strip() for x in values]

        values = self.config.getdefault(section, token, '')
        if not values:
            return default
        values = values.split(',')
        if not values:
            return default
        return [x.strip() for x in values]

    def _get_config_default(self, section, token, default=None):
        # monkey-patch method for ConfigParser
        # get an appropriate env var if it exists, else
        # get a key in a section, or the default

        # check if an env var exists that should replace the file config
        set_config_token_from_env(section, token, self.config)

        if not self.config.has_section(section):
            return default
        if not self.config.has_option(section, token):
            return default
        return self.config.get(section, token)

    def _get_config_bool(self, section, token, default=False):
        # monkey-patch method for ConfigParser
        # get a key in a section, or the default

        # check if an env var exists that should replace the file config
        set_config_token_from_env(section, token, self.config)

        if not self.config.has_section(section):
            return default
        if not self.config.has_option(section, token):
            return default
        return self.config.getboolean(section, token)

    def _get_config_raw_default(self, section, token, default=None,
                                section_sep="=", split_char=" "):
        # monkey-patch method for ConfigParser
        # if a [section:token] section exists, return its items joined with
        # section_sep; otherwise split the plain value (or default) on
        # split_char
        l_section = '{}:{}'.format(section, token)
        if self.config.has_section(l_section):
            return [
                section_sep.join(item)
                for item in self.config.items(l_section)
            ]
        if not self.config.has_option(section, token):
            return default.split(split_char)
        return self.config.get(section, token).split(split_char)
class Config(Common):
    r"""
    Config class.

    This class checks for the config file's presence.
    Also, this class manages TPB/KAT proxies; that is,
    it obtains the TPB/KAT URL and fetches proxies through those URLs.
    Proxies are stored as a list and returned.

    By default, the config file is checked in $XDG_CONFIG_HOME/torrench/
    with a fallback to the $HOME/.config/torrench/ directory (linux).
    For windows, the default location is ~\.config\torrench

    This class inherits Common class.
    """

    def __init__(self):
        """Initialisations."""
        Common.__init__(self)
        self.config = SafeConfigParser()
        # honour $XDG_CONFIG_HOME, falling back to ~/.config
        self.config_dir = os.getenv(
            'XDG_CONFIG_HOME',
            os.path.expanduser(os.path.join('~', '.config')))
        self.full_config_dir = os.path.join(self.config_dir, 'torrench')
        self.config_file_name = "config.ini"
        self.config_file_name_new = "config.ini.new"
        self.config_file = os.path.join(self.full_config_dir,
                                        self.config_file_name)
        self.config_file_new = os.path.join(self.full_config_dir,
                                            self.config_file_name_new)
        self.url = None
        self.name = None
        self.urllist = []
        self.logger = logging.getLogger('log1')

    def file_exists(self):
        """To check whether config.ini file exists and is enabled or not."""
        # returns True only when the file exists AND enable == '1';
        # otherwise falls through and returns None implicitly
        if os.path.isfile(self.config_file):
            self.config.read(self.config_file)
            enable = self.config.get('Torrench-Config', 'enable')
            if enable == '1':
                self.logger.debug("Config file exists and enabled!")
                return True

    def update_file(self):
        """Download the latest config.ini from the project repository, save it
        over the local copy, then re-enable it (enable = 1).

        Failures are logged and reported; no exception is propagated.
        """
        try:
            # Get updated copy of config.ini file.
            self.logger.debug("Downloading new config.ini file")
            url = "https://raw.githubusercontent.com/Thigoe/torrench/master/config.ini"
            self.logger.debug("Download complete. Saving file..")
            # NOTE(review): http_request comes from Common (not visible here);
            # presumably returns parsed HTML -- the file body is read from <p>
            soup = self.http_request(url)
            res = soup.p.get_text()
            with open(self.config_file, 'w', encoding="utf-8") as f:
                f.write(res)
            self.logger.debug("Saved new file as {}".format(
                self.config_file))
            # Read file and set enable = 1
            self.config.read(self.config_file)
            self.logger.debug("Now enabling file")
            self.config.set('Torrench-Config', 'enable', '1')
            # Write changes to config.ini file (self.config_file)
            with open(self.config_file, 'w', encoding="utf-8") as configfile:
                self.config.write(configfile)
            self.logger.debug("File enabled successfull and saved.")
            print("Config file updated!")
            self.logger.debug("Config file updated successfully.")
        except Exception as e:
            print("Something went wrong. See logs for details.")
            self.logger.debug(
                "Something gone wrong while updating config file.")
            self.logger.exception(e)

    # To get proxies for KAT/TPB/...
    def get_proxies(self, name):
        """
        Get Proxies.

        Proxies are read from config.ini file.
        For TPB and 1337x, the last configured URL points to a proxy-list
        page that is scraped for additional mirrors.
        """
        self.logger.debug("getting proxies for '%s'" % (name))
        temp = []
        self.config.read(self.config_file)
        name = '{}_URL'.format(name.upper())
        self.url = self.config.get('Torrench-Config', name)
        self.urllist = self.url.split()
        if name == 'TPB_URL':
            # last entry is a proxy-list page: scrape it, then drop it
            soup = self.http_request(self.urllist[-1])
            link = soup.find_all('td', class_='site')
            del self.urllist[-1]
            for i in link:
                temp.append(i.a["href"])
            self.urllist.extend(temp)
        elif name == "1337X_URL":
            soup = self.http_request(self.urllist[-1])
            link = soup.findAll('td', class_='text-left')
            del self.urllist[-1]
            for i in link:
                temp.append(i.a["href"])
            self.urllist.extend(temp)
        self.logger.debug("got %d proxies!" % (len(self.urllist)))
        return self.urllist
def conf(c): conf = SafeConfigParser(os.environ) conf.read(c('config_path')) return conf
import unittest import audiotools import tempfile import os import os.path from hashlib import md5 import random import decimal import test_streams import subprocess try: from configparser import SafeConfigParser except ImportError: from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read("test.cfg") def do_nothing(self): pass # add a bunch of decorator metafunctions like LIB_CORE # which can be wrapped around individual tests as needed for section in parser.sections(): for option in parser.options(section): if parser.getboolean(section, option): vars()["%s_%s" % (section.upper(), option.upper())] = lambda function: function else: