def read_categories():
    """Return the stored category list for the current session.

    Categories are read from disk storage when the configured backend is
    the file system; any other backend yields an empty list.
    """
    backend = ConfigReader.get_db_type()
    session = ConfigReader.get_session_id()
    if backend != CategoryListHandler.db_type_fs:
        return list()
    return DiskStorageCategoryListHandler.read_categories(session)
def __init__(self, cmdargv, configpath):
    """Initialize game state from the config file, then apply command-line
    overrides and start the I/O listener.

    :param cmdargv: parsed argparse.Namespace with the command-line flags
    :param configpath: path handed to ConfigReader
    """
    self.AI = None
    self.GameBoard = None
    self.Turn = "player"
    self.Config = ConfigReader(configpath)
    # A comparison already yields a bool — no conditional expression needed.
    self.MCTSMode = self.Config.retrieve("AIMODE") == "MCTS"
    self.BoardSize_X = self.Config.retrieve("BOARDSIZE_X")
    self.BoardSize_Y = self.Config.retrieve("BOARDSIZE_Y")
    self.Difficulty = self.Config.retrieve("DIFFICULTY")
    self.TileSearchRange = self.Config.retrieve("SEARCHRANGE")
    self.MCTSTimeLimit = self.Config.retrieve("TIMELIMIT")
    self.Use_XTA = self.Config.retrieve("USE_EXTENSIVE_ANALYSIS") == "1"
    self.XTA_Coefficient = self.Config.retrieve("EA_COEFFICIENT")
    # Command-line flags take precedence over the config file.
    self.cmdargv = vars(cmdargv)
    self.DebugMode = self.cmdargv["debug"]
    # NOTE(review): this unconditionally overrides the AIMODE value read
    # above, making that read a dead store — confirm that is intended.
    self.MCTSMode = self.cmdargv["mcts"]
    self.HideConsole = self.cmdargv["noconsole"]
    self.HideGUI = self.cmdargv["nogui"]
    self.RunRemote = self.cmdargv["remote"]
    if self.cmdargv["stdcomm"]:
        self.IOMethod = "STDIO"
    elif self.cmdargv["silent"]:
        self.IOMethod = "SOCKET"
    # NOTE(review): self.IOMethod stays unset when neither flag is given;
    # downstream code presumably guards on this — confirm.
    self.socket_addr = self.cmdargv["socket"][0]
    self.socket_port = self.cmdargv["socket"][1]
    self.runlistener()
def __init__(self, data_dir, package_name, config_name, max_y, rest_frequency, savefig=True, vpos=0.4, vlsr=8.0, lines_file_name="lltable.9.json", spectrum_file_name="testCubeStats.tab"):
    """Store the plotting parameters and wire up the collaborator objects
    (config generator/reader, spectrum reader, pretty printer)."""
    # Plain value attributes.
    self.data_dir = data_dir
    self.package_name = package_name
    self.config_name = config_name
    self.max_y = max_y
    self.rest_frequency = rest_frequency
    self.savefig = savefig
    self.vpos = vpos
    self.vlsr = vlsr
    self.lines_file_name = lines_file_name
    self.spectrum_file_name = spectrum_file_name
    # Collaborators, built straight from the constructor arguments.
    self.configGenerator = ConfigGenerator(
        data_dir, package_name, lines_file_name,
        max_y, vpos, vlsr, config_name)
    self.configReader = ConfigReader(config_name)
    self.spectrumReader = SpectrumReader(
        data_dir, package_name, spectrum_file_name)
    self.moleculePrettyPlotPrinter = MoleculePrettyPrinter()
def create_session_configs(configs_location=None, delete_old_configs=1):
    """Build all session configs from the template and store them.

    :param configs_location: target storage location; defaults to the
        location named in the global config
    :param delete_old_configs: truthy to wipe the location first
    :returns: list of the stored config ids
    """
    if configs_location is None:
        configs_location = ConfigReader.get_configs_location()
    if delete_old_configs:
        Storage.delete_location(configs_location)
    configs = [SessionConfigReader.get_config_template()]
    configs = SessionConfigBuilderCustom1.add_all_config_info(configs)
    n_configs = len(configs)
    SessionLogger.log('Constructed ' + str(n_configs)
                      + ' new session configs from template: \''
                      + ConfigReader.get_config_template_id() + '\'.')
    config_ids = list()
    # enumerate(..., start=1) replaces the original manual idx counter.
    for idx, conf in enumerate(configs, start=1):
        config_id = (configs_location + '/'
                     + SessionConfigBuilderCustom1.config_name + str(idx))
        SessionConfigReader.set_config(conf, config_id)
        config_ids.append(config_id)
    SessionLogger.log('Stored ' + str(n_configs) + ' session configs in \''
                      + configs_location + '\'.')
    return config_ids
def __init__(self, amqp_url, exchange_name=None, exchange_type=None, exchange_durable=False, queue_name=None, queue_durable=False, routing_key=None, confirm=False):
    """Create a new instance of the consumer class, passing in the AMQP URL
    used to connect to RabbitMQ.

    :param str amqp_url: The AMQP url to connect with
    """
    self._consumer_tag = None
    self._connection = None
    self._channel = None
    self._url = amqp_url
    self._closing = False
    self._exchange_durable = exchange_durable
    self._queue_durable = queue_durable
    self._enable_confirmation = confirm
    # Read the global config file and set up the logger; on failure we
    # log and bail out, leaving the instance without a logger.
    try:
        config = ConfigReader()
        config.read("OMQS.cfg")
        manager = OMQSLogManager(name=__name__, file_name=__name__)
        self._logger = manager.logger
    except Exception as e:
        # FIX: 'except Exception, e' is Python-2-only syntax; 'as' works
        # on Python 2.6+ and 3.x alike.
        logging.error('[OMQS][AsyncConsumer]: Failed to read the config or setting the logger: %s', str(e))
        return None
class ServerConfig(object):
    """Holds the server's runtime configuration, loaded via ConfigReader."""

    def __init__(self, configPath):
        reader = ConfigReader(configPath)
        self.config = reader
        self.warehousePath = reader.getWarehousePath()
        self.portNo = reader.getPortNo()
        self.logDir = reader.getLogDir()
        self.urlHandlers = c.URLHANDLERS

    def getLogDirectory(self):
        """Return the log directory, creating it on first access."""
        if not os.path.exists(self.logDir):
            os.makedirs(self.logDir)
        return self.logDir

    def getPortNo(self):
        """Port number the server listens on."""
        return self.portNo

    def getWarehousePath(self):
        """Root path of the warehouse storage."""
        return self.warehousePath

    def getURLHandlers(self):
        """URL handler table taken from the constants module."""
        return self.urlHandlers
def __init__(self):
    """Configure the root logger from the LOG section of the config file.

    The log file path is '<parent-dir>/<filename><YYYY-MM-DD>'; the level
    name from the config is mapped case-insensitively onto a logging
    level and falls back to DEBUG when unrecognized (same as before).
    """
    configReader = ConfigReader()
    path = configReader.getConfig('LOG', 'filename')
    # Log files live relative to the parent of the working directory.
    logFilepath = os.path.abspath(os.pardir) + "/" + path
    logFile = logFilepath + str(datetime.now().date())
    # Dict lookup replaces the original if/elif ladder; DEBUG stays the
    # default for unknown names, as in the original.
    level_map = {
        'DEBUG': logging.DEBUG,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'ERROR': logging.ERROR,
        'CRITICAL': logging.CRITICAL,
    }
    level = configReader.getConfig('LOG', 'level').upper()
    log_level = level_map.get(level, logging.DEBUG)
    logging.basicConfig(level=log_level,
                        format=' %(levelname)-2s %(asctime)s %(name)s %(message)s',
                        datefmt='%m-%d-%y %H:%M',
                        filename=logFile)
def read_stopwords():
    """Return the current session's stop words.

    Only the file-system backend is supported; other backends yield an
    empty list.
    """
    backend = ConfigReader.get_db_type()
    session = ConfigReader.get_session_id()
    if backend != StopwordHandler.db_type_fs:
        return list()
    return DiskStorageStopwordHandler.read_stopwords(session)
def add_stopwords(stopwords):
    """Persist *stopwords* for the current session and log the count.

    Only the file-system backend is supported; other backends are a no-op.
    """
    backend = ConfigReader.get_db_type()
    session = ConfigReader.get_session_id()
    if backend != StopwordHandler.db_type_fs:
        return
    DiskStorageStopwordHandler.add_stopwords(session, stopwords)
    SessionLogger.log(
        str(len(stopwords)) + ' stop words have been added.')
def externmodulecall(setting_path="../settings.config"):
    """Entry point for launching the GUI from another module.

    Builds the Tk root, the configured game board and the main screen,
    then blocks in the Tk main loop.
    """
    global root, configreader
    root = tkinter.Tk()
    configreader = ConfigReader(setting_path)
    # Board dimensions come from settings.config.
    board = GameBoard(configreader.retrieve("BOARDSIZE_X"),
                      configreader.retrieve("BOARDSIZE_Y"))
    MainScreen(root, board, configreader)
    root.mainloop()
def __init__(self):
    """Set up process-discovery state and the memory-address config."""
    self.pid = -1
    # Attribute names (including the 'Reaquire' spelling) are kept as-is
    # for compatibility with external users of this object.
    self.needReaquireGameState = True
    self.needReacquireModule = True
    self.module_address = 0
    self.original_facing = None
    self.config_reader = ConfigReader('memory_address')
    # The pointer offset is stored in config as a hex string; fall back
    # to the enum default when the property is absent.
    parse_hex = lambda x: int(x, 16)
    self.player_data_pointer_offset = self.config_reader.get_property(
        'player_data_pointer_offset',
        MemoryAddressOffsets.player_data_pointer_offset.value,
        parse_hex)
def __init__(self, master, xy_size, window_name):
    """Build the overlay top-level window and read its display settings.

    :param master: parent Tk widget, or None to create a root Tk()
    :param xy_size: (width, height) pair for the window geometry
    :param window_name: window title, also echoed to stdout
    """
    print("Launching {}".format(window_name))
    config_filename = "frame_data_overlay"
    self.tekken_config = ConfigReader(config_filename)
    # Windows 7 cannot handle the transparent-color trick, so it is used
    # to pick the transparency default below.
    is_windows_7 = 'Windows-7' in platform.platform()
    self.is_draggable_window = self.tekken_config.get_property(
        DisplaySettings.config_name(),
        DisplaySettings.overlay_as_draggable_window.name, False)
    self.is_minimize_on_lost_focus = self.tekken_config.get_property(
        DisplaySettings.config_name(),
        DisplaySettings.only_appears_when_Tekken_7_has_focus.name, True)
    self.is_transparency = self.tekken_config.get_property(
        DisplaySettings.config_name(),
        DisplaySettings.transparent_background.name, not is_windows_7)
    # Config stores "overlay on bottom"; we keep the inverted flag.
    self.is_overlay_on_top = not self.tekken_config.get_property(
        DisplaySettings.config_name(),
        DisplaySettings.overlay_on_bottom.name, False)
    self.overlay_visible = False
    # Root window when stand-alone, child Toplevel otherwise.
    if master == None:
        self.toplevel = Tk()
    else:
        self.toplevel = Toplevel()
    self.toplevel.wm_title(window_name)
    self.toplevel.attributes("-topmost", True)
    self.background_color = CurrentColorScheme.dict[
        ColorSchemeEnum.background]
    if self.is_transparency:
        # Everything painted in the transparency color becomes see-through.
        self.tranparency_color = CurrentColorScheme.dict[
            ColorSchemeEnum.transparent]
        self.toplevel.wm_attributes("-transparentcolor",
                                    self.tranparency_color)
        self.toplevel.attributes("-alpha", "0.75")
    else:
        if is_windows_7:
            print("Windows 7 detected. Disabling transparency.")
        # Opaque fallback: "transparency" color is just the background.
        self.tranparency_color = self.background_color
    self.toplevel.configure(background=self.tranparency_color)
    self.toplevel.iconbitmap('TekkenData/tekken_bot_close.ico')
    if not self.is_draggable_window:
        # Borderless window; dragging is then impossible by design.
        self.toplevel.overrideredirect(True)
    self.w = xy_size[0]
    self.h = xy_size[1]
    self.toplevel.geometry(str(self.w) + 'x' + str(self.h))
def __init__(self, keys, amqp_url=None, queue=None, queue_durable=False, log_callback=None, name='LogMsgConsumer'):
    """Read the log-consumer exchange/queue settings from OMQS.cfg.

    :param keys: routing keys to bind
    :param queue: queue name; falls back to the config value when falsy
    :param str name: tag used in error log messages
    """
    try:
        config = ConfigReader()
        config.read("OMQS.cfg")
        exchange = config.LogConsumer.EXCHANGE
        ex_durable = config.LogConsumer.EXCHANGE_DURABLE
        if not queue:
            queue = config.LogConsumer.QUEUE
    except Exception as e:
        # FIX: 'except Exception, e' is Python-2-only syntax; 'as' works
        # on Python 2.6+ and 3.x alike.
        logging.error('[%s]: Failed to read the config or setting the logger: %r', name, e)
        return None
def add_evaluation(score, session_id=None):
    """Append one evaluation row (timestamp, session, config, score) to
    the persisted evaluations frame.

    :param score: evaluation score to record
    :param session_id: session to record under; defaults to the current one
    """
    if session_id is None:
        session_id = ConfigReader.get_session_id()
    config_id = ConfigReader.get_config_id()
    frame = Storage.load_pd_frame(
        EvaluationHandler.evaluations_id, session_id=session_id)
    now_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # New row goes at the end of the frame.
    row = len(frame)
    frame.at[row, EvaluationHandler.timestamp_col] = now_str
    frame.at[row, EvaluationHandler.session_id_col] = session_id
    frame.at[row, EvaluationHandler.config_id_col] = config_id
    frame.at[row, EvaluationHandler.score_col] = score
    Storage.store_pd_frame(
        frame, EvaluationHandler.evaluations_id, session_id=session_id)
def getUrlWithHeaderRawData(vendor):
    """Build the Namdhari's product URL, cookie jar and payload.

    :param vendor: dict describing one product (query, id, optional vpid,
        location, names used in the error message)
    :returns: (url, cookie_header, raw_data) — raw_data is always
        Constants.NONE for this vendor
    """
    config_dict = ConfigReader.get_confic_dict()
    # URL shape: <scheme>://<base>/<product_query>/<product_id>.<extension>
    url = config_dict[Constants.Namdhari_s][
        'http_scheme'] + Constants.COLON + Constants.DOUBLEFORWARDSLASH + config_dict[
            Constants.Namdhari_s]['base_url'] + vendor[
                'product_query'] + Constants.FORWARDSLASH + vendor[
                    'product_id'] + Constants.DOT + config_dict[
                        Constants.Namdhari_s]['url_extension']
    # 'product_vpid' is optional; "NA" explicitly means "no variant".
    try:
        if vendor['product_vpid'] != "NA":
            url += Constants.QUESTION_MARK + "vpid" + Constants.EQUALS + vendor[
                'product_vpid']
    except KeyError:
        print(
            "product_vpid is not found, normal url will be used for quering "
            + vendor['product_name'] + " || vendor is " + vendor['vendor_name'])
    # The site selects the store location through the Site_Config cookie.
    cookie_header = requests.cookies.RequestsCookieJar()
    cookie_header.set('Site_Config', vendor['location_query'])
    raw_data = Constants.NONE
    return url, cookie_header, raw_data
def queryWebsite(url, cookie_header, raw_data, method_type):
    """Query Healthy Buddha and scrape price + SKU unit from the page.

    :returns: list of [label, value] pairs — [vendor, price] and
        [vendor SKU, unit]; empty when the request fails or the page has
        no matching <option> elements
    """
    config_dict = ConfigReader.get_confic_dict()
    return_list = []
    price_vendor_list = []
    read_response = Utils.makeRequestUrl(url, cookie_header, raw_data, method_type)
    if read_response and read_response != Constants.EXCEPTION_QUERY:
        soup = BeautifulSoup(read_response.text, 'html.parser')
        # The price/unit variants live in <option> tags under a known id.
        extracted_data = soup.find(id=config_dict[Constants.Healthy_Buddha]['id_value']).find_all('option')
        if extracted_data:
            # Only the first variant is reported.
            data = extracted_data[Constants.NUM_0]
            # NOTE(review): the unit and value are carved out of the raw
            # tag string by splitting on '- <' and '>' — this is very
            # sensitive to the site's markup; verify against a live page.
            sku_unit = str(data).split(Constants.DASH + Constants.SPACE + Constants.LESSER_OP)[Constants.NUM_0].split(Constants.GREATER_OP)[Constants.NUM_1]
            sku_value = str(data).split(Constants.DASH + Constants.SPACE + Constants.LESSER_OP)[Constants.NUM_1].split(Constants.GREATER_OP)[Constants.NUM_2].split(Constants.LESSER_OP)[Constants.NUM_0]
            # Pair 1: vendor display name + price.
            price_vendor_list.append(Constants.Healthy_Buddha.replace(Constants.UNDERSCORE, Constants.SPACE))
            price_vendor_list.append(sku_value.strip())
            return_list.append(price_vendor_list)
            # Pair 2: vendor SKU label + unit.
            price_vendor_list = []
            price_vendor_list.append(Constants.Healthy_Buddha.replace(Constants.UNDERSCORE, Constants.SPACE) + Constants.SPACE + Constants.STR_SKU)
            price_vendor_list.append(sku_unit)
            return_list.append(price_vendor_list)
            # Discounted price support is not implemented for this vendor.
            if config_dict[Constants.Healthy_Buddha]['discounted_is_required'] == str(True):
                print("if True then will add the discounted price also not required in this case, you can add later")
    return return_list
def getUrlWithHeaderRawData(vendor):
    """Build the Healthy Buddha product URL.

    No cookies or request payload are needed for this vendor, so both are
    returned as Constants.NONE.
    """
    config_dict = ConfigReader.get_confic_dict()
    site = config_dict[Constants.Healthy_Buddha]
    url = (site['http_scheme'] + Constants.COLON
           + Constants.DOUBLEFORWARDSLASH + site['base_url']
           + vendor['product_query'])
    return url, Constants.NONE, Constants.NONE
def queryWebsite(url, cookie_header, raw_data, method_type):
    """Query Townness and return [label, price] pairs.

    Always includes the MRP pair; adds the discounted (SP) pair when the
    config asks for it. Empty list when the response has no data.
    """
    config_dict = ConfigReader.get_confic_dict()
    site = config_dict[Constants.Townness]
    results = []
    payload = Utils.makeRequestUrl(url, cookie_header, raw_data,
                                   method_type).json()
    if payload:
        first = payload[Constants.NUM_0]
        vendor_label = Constants.Townness.replace(Constants.UNDERSCORE,
                                                  Constants.SPACE)
        results.append([vendor_label, first[site['mrp_syntax']]])
        if site['discounted_is_required'] == str(True):
            discounted_label = Constants.Townness_Discounted.replace(
                Constants.UNDERSCORE, Constants.SPACE)
            results.append([discounted_label, first[site['sp_syntax']]])
    return results
def recDataPersist(data_dict):
    """Persist *data_dict* into the first sheet of the daily Excel file."""
    config_dict = ConfigReader.get_confic_dict()
    status, fileName = ModelMonthlyBucketDataPersister.createExcelFile()
    if status != Constants.STR_SUCCESS:
        return
    # Configured sheet name uses underscores as stand-ins for spaces;
    # compute it once instead of three times.
    sheet_name = config_dict[Constants.STRING_APP][
        'daily_excel_sheet_one_name'].replace(Constants.UNDERSCORE,
                                              Constants.SPACE)
    ModelMonthlyBucketDataPersister.loadExcelAddSheet(fileName, sheet_name)
    ModelMonthlyBucketDataPersister.firt2RowPersistInSheet(fileName, sheet_name)
    ModelMonthlyBucketDataPersister.writeDictDataInExcelModelMontlyBasket(
        data_dict, fileName, sheet_name)
def __init__(self, conf):
    """Set up the compass process driver from the *conf* dict."""
    self.conf = conf
    self.compass_connected = False
    # Compass-specific settings live in a separate JSON file whose path
    # is named inside conf.
    self.compass_conf = ConfigReader.read_json(conf["compass"]["file"])
    self.heading = None
    # Explicit base-class init: hand the worker function and its argument
    # tuple to the process driver (attributes set before this line are
    # available to it; keep the ordering).
    ProcessDriver.__init__(self, compass_process, (conf, ))
    self.daemon = conf["daemon"]
    # NOTE(review): declination is read from conf, not from the compass
    # JSON loaded above — confirm that is intentional.
    self.declination_deg = conf["compass"]["declination_deg"]
def getUrlWithHeaderRawData(vendor):
    """Build the product URL and location cookie for this vendor.

    :returns: (url, cookie_header, raw_data) — raw_data is Constants.NONE
    """
    config_dict = ConfigReader.get_confic_dict()
    # NOTE(review): the scheme comes from the Gournet_Garden config entry
    # but the base_url from Big_basket — looks like a copy/paste artifact;
    # confirm which vendor this function actually serves.
    url = config_dict[Constants.Gournet_Garden]['http_scheme'] + Constants.COLON + Constants.DOUBLEFORWARDSLASH + config_dict[Constants.Big_basket]['base_url'] + vendor['product_id'] + Constants.FORWARDSLASH + vendor['product_query'] + Constants.FORWARDSLASH
    # The store location is selected through the '_bb_cid' cookie.
    cookie_header = requests.cookies.RequestsCookieJar()
    cookie_header.set('_bb_cid', vendor['location_query'])
    raw_data = Constants.NONE
    return url, cookie_header, raw_data
def __init__(self):
    """Build the frame-data overlay window, styles, labels and the stdout
    redirector that feeds captured bot output into the overlay."""
    print("Tekken Bot Starting...")
    self.tekken_config = ConfigReader("frame_data_overlay")
    self.launcher = FrameDataLauncher()
    self.overlay_visible = False
    # This object *is* the Tk root (old-style explicit base init).
    Tk.__init__(self)
    self.wm_title("Tekken Bot: Frame Data Overlay")
    # White is used as the chroma-key color: everything painted white
    # becomes transparent.
    self.wm_attributes("-transparentcolor", "white")
    self.attributes("-topmost", True)
    self.attributes("-alpha", "0.75")
    self.iconbitmap('TekkenData/tekken_bot_close.ico')
    # Borderless overlay window.
    self.overrideredirect(True)
    self.configure(background='white')
    self.s = Style()
    self.s.theme_use('alt')
    self.s.configure('.', background='black')
    self.s.configure('.', foreground='black')
    # Three columns: left label / stretching center / right label.
    Grid.columnconfigure(self, 0, weight=0)
    Grid.columnconfigure(self, 1, weight=1)
    Grid.columnconfigure(self, 2, weight=0)
    Grid.rowconfigure(self, 0, weight=1)
    Grid.rowconfigure(self, 1, weight=0)
    # Per-player frame-advantage and attack-type labels (cols 0 and 2).
    self.fa_p1_var = self.create_frame_advantage_label(0)
    self.fa_p2_var = self.create_frame_advantage_label(2)
    self.at_p1_var = self.create_attack_type_label(0)
    self.at_p2_var = self.create_attack_type_label(2)
    self.text = self.create_textbox()
    # Keep the real stdout so it can be restored after the header line.
    self.stdout = sys.stdout
    self.redirector = TextRedirector(self.stdout, self.text, self.s,
                                     self.fa_p1_var, self.fa_p2_var,
                                     self.at_p1_var, self.at_p2_var)
    # Print the column header through the redirector, then restore stdout.
    self.redirect_stdout()
    print("move | type | startup | damage | block | hit | active")
    self.restore_stdout()
def clear():
    """Wipe the current session's log (file-system backend only)."""
    wanted = [SessionLogger.database_type_key,
              SessionLogger.session_id_key]
    db_type, session_id = ConfigReader.read_values(wanted)
    if db_type == SessionLogger.db_type_fs:
        DiskStorageSessionLogger.clear(session_id)
def __init__(self, amqp_url=None, keys=None, queue=None, evt_callback=None, name='EventMsgConsumer'):
    """Load event-consumer exchange/queue/key defaults from OMQS.cfg.

    Falsy *keys* / *queue* arguments fall back to the config values.

    :param str name: tag used in error log messages
    """
    self._logger = None
    try:
        config = ConfigReader()
        config.read("OMQS.cfg")
        exchange = config.EventConsumer.EXCHANGE
        ex_durable = config.EventConsumer.EXCHANGE_DURABLE
        if not keys:
            keys = config.EventConsumer.KEYS
        if not queue:
            queue = config.EventConsumer.QUEUE
    except Exception as e:
        # FIX: 'except Exception, e' is Python-2-only syntax; 'as' works
        # on Python 2.6+ and 3.x alike.
        logging.error('[%s]: Failed to read the config or setting the logger: %r', name, e)
def __init__(self, output=True, debug=False, feeding=False):
    """Wire up vision, hardware-detection and game-control components.

    :param output: enable Logger output
    :param debug: enable Logger debug mode
    :param feeding: forwarded to the game controller
    """
    Logger.debug = debug
    Logger.out = output
    # The video source must be running before the hardware detector can
    # be seeded with a recent frame.
    self.vs = Vs().start()
    self.hd = Hd(self.vs.getFrames("BGR")[-2])
    self.gd = Gd()
    self.gc = Gc(ConfigReader.default(), feeding=feeding)
    self.feeding = feeding
def __init__(self, configPath):
    """Load server settings from *configPath* via ConfigReader."""
    reader = ConfigReader(configPath)
    self.config = reader
    self.warehousePath = reader.getWarehousePath()
    self.portNo = reader.getPortNo()
    self.logDir = reader.getLogDir()
    # URL handler table comes from the constants module, not the config.
    self.urlHandlers = c.URLHANDLERS
def log(text: str, log_type='info'):
    """Write *text* to the session log (file-system backend only).

    :param text: message to record
    :param log_type: log category, e.g. 'info'
    """
    wanted = [SessionLogger.database_type_key,
              SessionLogger.session_id_key,
              SessionLogger.conf_id_key]
    db_type, session_id, conf_id = ConfigReader.read_values(wanted)
    if db_type == SessionLogger.db_type_fs:
        DiskStorageSessionLogger.log(text, session_id, conf_id, log_type)
def run_config_tests(run_import=0, run_preprocessing=0, run_vectorization=0, config_ids=None, resume_at_idx=0):
    """Run the classification test for every session config and rank results.

    :param config_ids: configs to test; freshly generated when None
    :param resume_at_idx: 0-based index to resume from after an interruption
    :returns: the sorted evaluations loaded from EvaluationHandler
    """
    if config_ids is None:
        config_ids = SessionConfigBuilder.create_session_configs()
    n_configs = len(config_ids)
    # enumerate replaces the original manual while/idx counter; start at
    # resume_at_idx + 1 so the log keeps its 1-based progress numbering.
    for pos, config_id in enumerate(config_ids[resume_at_idx:],
                                    start=resume_at_idx + 1):
        ConfigReader.set_session_config_id(config_id)
        SetupRunner.run_setup(run_import=run_import,
                              run_preprocessing=run_preprocessing,
                              run_vectorization=run_vectorization,
                              run_classification=0)
        res = SetupRunner.run_classification_test()
        score = ClassificationInterpreter.evaluate_output(res)
        SessionLogger.log('Evaluated config # ' + str(pos) + ' / '
                          + str(n_configs) + ' . Score: ' + str(score))
    EvaluationHandler.sort()
    evaluations = EvaluationHandler.load_evaluations()
    return evaluations
def AppendFileName():
    """Return the full path of today's daily Excel file.

    Shape: <base_path><prefix>_<date-per-config-format>.<extension>.
    """
    config_dict = ConfigReader.get_confic_dict()
    app = config_dict[Constants.STRING_APP]
    # The configured prefix uses underscores as stand-ins for spaces.
    prefix = app['daily_excel_file_name_prefix'].replace(
        Constants.UNDERSCORE, Constants.SPACE)
    name = app['daily_excel_base_path'] + prefix + Constants.UNDERSCORE
    if app['daily_excel_appender_type'] == Constants.STRING_DATE:
        name = name + datetime.today().strftime(
            app['daily_excel_type_format'])
    else:
        print("If appender for the file name changed will add implemetation here in future")
    name = name + Constants.DOT + app['daily_excel_extension']
    return name
def getUrlWithHeaderRawData(vendor):
    """Build the Townness URL and form-style payload.

    :returns: (url, cookie_header, raw_data) — no cookies; raw_data is a
        urlencoded 'categoryid/productid/type' string
    """
    config_dict = ConfigReader.get_confic_dict()
    # NOTE(review): the scheme comes from the Gournet_Garden entry while
    # the base_url comes from Townness — looks like a copy/paste artifact;
    # confirm which vendor's scheme is intended.
    url = config_dict[Constants.Gournet_Garden][
        'http_scheme'] + Constants.COLON + Constants.DOUBLEFORWARDSLASH + config_dict[
            Constants.Townness]['base_url']
    cookie_header = Constants.NONE
    # Townness takes the product selection as a POST-style body.
    raw_data = "categoryid=" + vendor[
        'category_id'] + "&productid=" + vendor[
            'product_id'] + "&type=getproduct"
    return url, cookie_header, raw_data
def __init__(self, amqp_url=None, exchange_name=None, exchange_type=None, exchange_durable=False):
    """Setup the publisher, resolving connection and exchange settings.

    Each setting is resolved in order: explicit argument, then OMQS.cfg,
    then the module-level default.

    :param str amqp_url: The URL for connecting to RabbitMQ
    """
    self._connection = None
    self._channel = None
    self._message_number = 0
    self._ready = False
    self._exchange_durable = exchange_durable
    # Read the global config file once; it supplies every fallback below.
    config = ConfigReader()
    config.read("OMQS.cfg")
    # 'or' chains mirror the original truthiness-based fallback ladders.
    self._url = amqp_url or config.Global.MQURL or URL
    self._exchange = exchange_name or config.SyncPublisher.EXCHANGE or EXCHANGE
    self._exchange_type = (exchange_type
                           or config.SyncPublisher.EXCHANGE_TYPE
                           or EXCHANGE_TYPE)
    self._default_key = config.SyncPublisher.ROUTING_KEYS or ROUTING_KEYS
def __init__(self, config_file): self.config_file = config_file # an yml file configReader = ConfigReader() self.config = configReader.read(self.config_file) # is a dict of lists of dicts (see yml file structure) self.class_name = '' self.data_name = 'PrivateData' # Verify all names for key, value in self.config.items(): if key == 'class': #print value verifyName(value) self.class_name = value else: # methods, properties, private_data for item in value: #print item['name'] verifyName(item['name']) # TODO: check for duplicates # Retrieve private data names private_data_names = [] if 'private_data' in self.config.keys(): dictlist = self.config['private_data'] private_data_names = [dictio['name'] for dictio in dictlist] #print private_data_names # Check if a property refers to a non-existing data field if 'properties' in self.config.keys(): for item in self.config['properties']: referred_data = tryRead(item,'modifies') if referred_data and referred_data not in private_data_names: print ('%s property modifies %s private_data field. Autocreating.' % (item['name'], item['modifies'])) # Add private data field #print item if not 'private_data' in self.config.keys(): self.config['private_data'] = [] self.config['private_data'].append({'name':item['name'], 'returns':item['returns'], 'default':''}) return
def queryWebsite(url, cookie_header, raw_data, method_type):
    """Query Organic World and scrape price + unit from the embedded
    Shopify-style 'var meta' JSON blob.

    :returns: list of [label, value] pairs — [vendor, price] and
        [vendor SKU, unit]; empty when the request or parsing fails
    """
    config_dict = ConfigReader.get_confic_dict()
    return_list = []
    price_vendor_list = []
    read_response = Utils.makeRequestUrl(url, cookie_header, raw_data,
                                         method_type)
    if read_response and read_response != Constants.EXCEPTION_QUERY:
        soup = BeautifulSoup(read_response.text, features="html.parser")
        # Search every configured tag for the script containing 'var meta'.
        parsed_txt = soup.find_all(
            config_dict[Constants.Organic_World]['tag_value'])
        extracted_data = Constants.NONE
        for txt in parsed_txt:
            if re.search('var meta', txt.text):
                extracted_data = txt.text
                break
        if extracted_data != Constants.NONE:
            # Carve the JSON out of the script text; sensitive to the
            # exact 'var meta = ...;\nfor' layout of the page.
            data_str = extracted_data.split("var meta = ")[
                Constants.NUM_1].split(';\nfor')[Constants.NUM_0]
            data = json.loads(data_str)
            # Only the first variant is reported; price is in paise/cents.
            any_one_variant = data['product']['variants'][Constants.NUM_0]
            sku_value = any_one_variant['price'] / 100
            sku_unit = any_one_variant['public_title']
            if sku_unit is None:
                # Fall back to the part of 'name' after the comma.
                sku_unit = any_one_variant['name'].split(
                    Constants.COMMA)[Constants.NUM_1]
            # Pair 1: vendor display name + price.
            price_vendor_list.append(
                Constants.Organic_World.replace(Constants.UNDERSCORE,
                                                Constants.SPACE))
            price_vendor_list.append(sku_value)
            return_list.append(price_vendor_list)
            # Pair 2: vendor SKU label + unit.
            price_vendor_list = []
            price_vendor_list.append(
                Constants.Organic_World.replace(Constants.UNDERSCORE,
                                                Constants.SPACE)
                + Constants.SPACE + Constants.STR_SKU)
            price_vendor_list.append(sku_unit)
            return_list.append(price_vendor_list)
            # Discounted price support is not implemented for this vendor.
            if config_dict[Constants.Organic_World][
                    'discounted_is_required'] == str(True):
                print(
                    "if True then will add the discounted price also not required in this case, you can add later"
                )
    print(return_list)
    return return_list
def set_config(json_f, config_id=None):
    """Store *json_f* as a session config (file-system backend only).

    :param json_f: config content to store
    :param config_id: target config id; defaults to the configured one
    """
    wanted = [SessionConfigReader.database_type_key,
              SessionConfigReader.config_id_key,
              SessionConfigReader.session_id_key]
    db_type, default_config_id, session_id = ConfigReader.read_values(wanted)
    if config_id is None:
        config_id = default_config_id
    if db_type == SessionConfigReader.db_type_fs:
        DiskStorageSessionConfigReader.set_config(session_id, config_id, json_f)
def __init__(self):
    """Load SMTP connection settings from the global config.

    The stored password is base64-encoded (obfuscation only, not
    encryption) and is decoded here.
    """
    smtp = ConfigReader.get_confic_dict()[Constants.SMTP_DICT]
    self.HOST_NAME = smtp[Constants.HOST_NAME]
    self.HOST_PORT = smtp[Constants.HOST_PORT]
    self.ALLOWSSL = smtp[Constants.SMTP_ALLOWSSL]
    self.ALLOWSTARTTLS = smtp[Constants.SMTP_ALLOWSTARTTLS]
    self.SENDER = smtp[Constants.SMTP_SENDER]
    self.PASSWORD = str(
        base64.b64decode(smtp[Constants.SMTP_PASSWORD]), "utf-8")
def set_best_performing_by_ids(session_id=None, config_id=None):
    """Mark a config as best-performing (file-system backend only).

    :param session_id: session owning the config; defaults to current
    :param config_id: config to mark; defaults to the configured one
    """
    wanted = [SessionConfigReader.database_type_key,
              SessionConfigReader.config_id_key,
              SessionConfigReader.session_id_key]
    db_type, default_config_id, default_session_id = \
        ConfigReader.read_values(wanted)
    if config_id is None:
        config_id = default_config_id
    if session_id is None:
        session_id = default_session_id
    if db_type == SessionConfigReader.db_type_fs:
        DiskStorageSessionConfigReader.set_best_performing_by_ids(
            session_id, config_id)
def read_values(keys):
    """Read *keys* from the current session config.

    Only the file-system backend is supported; other backends yield an
    empty list.
    """
    wanted = [SessionConfigReader.database_type_key,
              SessionConfigReader.config_id_key,
              SessionConfigReader.session_id_key]
    db_type, config_id, session_id = ConfigReader.read_values(wanted)
    if db_type != SessionConfigReader.db_type_fs:
        return list()
    return DiskStorageSessionConfigReader.read_values(
        keys, session_id, config_id)
def __init__(self, amqp_url=None, exchange_name=None, exchange_type=None, exchange_durable=False, queue_name=None, queue_durable=False, confirm=False):
    """Setup the publisher object, passing in the URL we will use to
    connect to RabbitMQ.

    :param str amqp_url: The URL for connecting to RabbitMQ
    """
    self._connection = None
    self._channel = None
    self._acked = 0
    self._nacked = 0
    self._message_number = 0
    self._stopping = False
    self._ready = False
    self._url = amqp_url
    self._closing = False
    self._exchange_durable = exchange_durable
    self._queue_durable = queue_durable
    self._enable_confirmation = confirm
    # Read the global config file and set up the logger; on failure we
    # log and bail out, leaving the instance without a logger.
    try:
        config = ConfigReader()
        config.read("OMQS.cfg")
        manager = OMQSLogManager(name=__name__, file_name=__name__)
        self._logger = manager.logger
    except Exception as e:
        # FIX: 'except Exception, e' is Python-2-only syntax; 'as' works
        # on Python 2.6+ and 3.x alike.
        logging.error('[OMQS][%s]: Failed to read the config or setting the logger: %r', __name__, e)
        return None
def main():
    """Drive a TMVA MLP classification: load signal/background trees per
    the config file, register training variables, then train/test/evaluate.
    (Python 2 script: print statements and dict.iteritems.)"""
    # Parse all command line arguments using the argparse module
    parser = argparse.ArgumentParser(description='')
    parser.add_argument("-c", "--config", help="Configuration file")
    args = parser.parse_args()
    # A config file is mandatory; exit code 2 signals bad usage.
    if args.config is None:
        print '[ hps-learn ]: A configuration file needs to be specified!'
        sys.exit(2)
    # Parse the configuration
    config_reader = ConfigReader(args.config)
    # If signal files were specified, open them and get the specified ROOT
    # TTree objects. Otherwise, warn the user that no signal files were
    # specified and exit.
    signal_files = config_reader.get_signal_files()
    signal_root_files = list()
    signal_root_trees = list()
    if signal_files :
        # Mapping of file path -> TTree name.
        for signal_file, tree_name in signal_files.iteritems():
            print "[ hps-learn ]: Loading signal file " + signal_file + " with TTree name " + tree_name
            # Keep the TFile alive (appended) so the tree stays readable.
            signal_root_files.append(root.TFile(signal_file))
            signal_root_trees.append(signal_root_files[-1].Get(tree_name))
    else :
        print "[ hps-learn ]: At least a single signal file needs to be specified."
        sys.exit(2)
    # If background files were specified, open them and get the specified ROOT
    # TTree objects. Otherwise, warn the user that no background files were
    # specified and exit.
    bkg_files = config_reader.get_background_files()
    bkg_root_files = list()
    bkg_root_trees = list()
    if bkg_files:
        for bkg_file, tree_name in bkg_files.iteritems():
            print "[ hps-learn ]: Loading background file " + bkg_file + " with TTree name " + tree_name
            bkg_root_files.append(root.TFile(bkg_file))
            bkg_root_trees.append(bkg_root_files[-1].Get(tree_name))
    else :
        print "[ hps-learn ]: At least a single background file needs to be specified."
        sys.exit(2)
    # Initialize TMVA and open the output file the factory writes into.
    root.TMVA.Tools.Instance()
    tmva_file = root.TFile("tmva_output.root", "RECREATE")
    factory = root.TMVA.Factory("TMVA_Analysis", tmva_file,
                                ":".join(["!V", "!Silent", "Color",
                                          "DrawProgressBar",
                                          "Transformations=I;D;P;G;D",
                                          "AnalysisType=Classification"]))
    # Register every signal and background tree with the factory.
    for signal_tree in signal_root_trees:
        factory.AddSignalTree(signal_tree)
    for bkg_tree in bkg_root_trees:
        factory.AddBackgroundTree(bkg_tree)
    # Training variables: mapping of branch name -> TMVA type code.
    variables = config_reader.get_training_variables()
    for variable, variable_type in variables.iteritems():
        print "Adding training variable " + variable + " of type " + variable_type
        factory.AddVariable(variable, variable_type)
    # No cuts: use all events, split randomly, normalize by event count.
    signal_cuts = root.TCut("")
    bkg_cuts = root.TCut("")
    factory.PrepareTrainingAndTestTree(signal_cuts, bkg_cuts,
                                       ":".join(["nTrain_Signal=0",
                                                 "nTrain_Background=0",
                                                 "SplitMode=Random",
                                                 "NormMode=NumEvents",
                                                 "!V"]))
    # Book a multilayer-perceptron ANN with default options, then run the
    # full train/test/evaluate cycle.
    method = factory.BookMethod(root.TMVA.Types.kMLP, "MLP_ANN", "")
    factory.TrainAllMethods()
    factory.TestAllMethods()
    factory.EvaluateAllMethods()
# usernames[thisUsername] = 1 # if thisIPAddr in ip_addresses: # ip_addresses[thisIPAddr] += 1 # else: # ip_addresses[thisIPAddr] = 1 else: print "PARSE ERROR on line: " + line # ------------------------------------------------------------------------------- # Get the current list of denied IP addreses. We'll use that to ensure we don't # add duplicates denied_ips = [] cr = ConfigReader(hostsdenyfile) cr.read() config = cr.get_config() for line in config: parts = line.split(":") denied_ips.append(parts[1].strip()) status_update = "DEBUG: hosts.deny file currently has " + str(len(denied_ips)) + " denied IP addresses." email_msg += status_update + "\n" if verbose: print status_update # ------------------------------------------------------------------------------- # Determine if there are any new IP addresses that we should ban.
def __init__(self, name='BaseSyncConsumer', amqp_url=None, exchange_name=None, exchange_type=None, exchange_durable=False, queue_name=None, queue_durable=False, routing_keys=None, no_ack=True, callback=None):
    """Setup the consumer: resolve connection, exchange, queue, routing-key
    and callback settings from the arguments, then OMQS.cfg, then the
    module-level defaults.

    :param str amqp_url: The URL for connecting to RabbitMQ
    """
    self._name = name
    self._connection = None
    self._channel = None
    self._message_number = 0
    self._exchange_durable = exchange_durable
    self._queue_durable = queue_durable
    self._no_ack = no_ack
    self._callback = callback
    manager = OMQSLogManager(name=name, file_name=name)
    self._logger = manager.logger
    # Read the global config file
    config = ConfigReader()
    config.read("OMQS.cfg")
    if amqp_url:
        self._url = amqp_url
    else:
        self._url = config.Global.MQURL
    if not self._url:
        self._url = URL
    # NOTE(review): exchange/queue/key fallbacks are read from the
    # SyncPublisher config section, not a consumer section — confirm.
    if exchange_name:
        self._exchange = exchange_name
    else:
        self._exchange = config.SyncPublisher.EXCHANGE
    if not self._exchange:
        self._exchange = EXCHANGE
    if exchange_type:
        self._exchange_type = exchange_type
    else:
        self._exchange_type = config.SyncPublisher.EXCHANGE_TYPE
    if not self._exchange_type:
        self._exchange_type = EXCHANGE_TYPE
    if queue_name:
        self._queue = queue_name
    else:
        self._queue = config.SyncPublisher.QUEUE
    if not self._queue:
        self._queue = QUEUE
    if routing_keys:
        self._routing_keys = routing_keys
    else:
        # BUG FIX: the original initialized 'self._keys' here, so the
        # append below raised AttributeError whenever the keys came from
        # the config string.
        self._routing_keys = []
        str_keys = config.SyncPublisher.ROUTING_KEYS
        if str_keys:
            # Config stores the keys as one comma-separated string.
            keys = str_keys.split(',')
            for key in keys:
                self._routing_keys.append(key.strip())
        else:
            self._routing_keys = KEYS
    if not self._callback:
        self._callback = default_callback