def __init__(self, argv=None):
    """Create the server: parse arguments, read the configuration file,
    set up logging and start the RPC server.

    Unless --debug is given, the process is turned into a daemon
    (via create_daemon) before the server is initialized.

    :param argv: command line arguments; defaults to sys.argv.
    """
    if self.server_name is None:
        # Default the server name to the concrete subclass name.
        self.server_name = self.__class__.__name__
    if argv is None:
        argv = sys.argv
    conf_path, options = self.parseArgs(argv)
    self.default_log_path = self.server_name + '.log'
    self.default_pid_path = self.server_name + '.pid'
    self.server = None
    self.quit = False
    # read conf
    conf = ConfigParser()
    conf.read(conf_path)
    self.conf_path = conf_path
    self.host = conf.get('server', 'host')
    self.port = conf.getint('server', 'port')
    # pid_path and log_path are optional; fall back to
    # <server_name>.pid / <server_name>.log when absent.
    try:
        self.pid_path = conf.get('server', 'pid_path')
    except NoOptionError:
        self.pid_path = self.default_pid_path
    try:
        log_path = conf.get('server', 'log_path')
    except NoOptionError:
        log_path = self.default_log_path
    # Refuse to start a second instance on the same host:port.
    if is_server_running(self.host, self.port):
        trace("Server already running on %s:%s." % (self.host, self.port))
        sys.exit(0)
    trace('Starting %s server at http://%s:%s/' % (self.server_name,
                                                   self.host, self.port))
    # init logger
    if options.verbose:
        level = logging.DEBUG
    else:
        level = logging.INFO
    if options.debug:
        # Debug mode also echoes the log to the console.
        log_to = 'file console'
    else:
        log_to = 'file'
    self.logger = get_default_logger(log_to, log_path, level=level,
                                     name=self.server_name)
    # subclass init
    self._init_cb(conf, options)
    # daemon mode
    if not options.debug:
        trace(' as daemon.\n')
        # Close the logger handlers before daemonizing, then
        # recreate the logger in the detached process.
        close_logger(self.server_name)
        create_daemon()
        # re init the logger
        self.logger = get_default_logger(log_to, log_path, level=level,
                                         name=self.server_name)
    else:
        trace(' in debug mode.\n')
    # init rpc
    self.initServer()
def _funkload_init(self): """Initialize a funkload test case using a configuration file.""" # look into configuration file config_path = getattr(self._options, 'config', None) if not config_path: config_directory = os.getenv('FL_CONF_PATH', '.') config_path = os.path.join(config_directory, self.__class__.__name__ + '.conf') config_path = os.path.abspath(os.path.expanduser(config_path)) if not os.path.exists(config_path): config_path = "Missing: " + config_path config = ConfigParser() config.read(config_path) self._config = config self._config_path = config_path self.conf = ConfSectionFinder(self) self.default_user_agent = self.conf_get('main', 'user_agent', 'FunkLoad/%s' % get_version(), quiet=True) if self.in_bench_mode: section = 'bench' else: section = 'ftest' self.setOkCodes( self.conf_getList(section, 'ok_codes', [200, 301, 302, 303, 307], quiet=True)) self.sleep_time_min = self.conf_getFloat(section, 'sleep_time_min', 0) self.sleep_time_max = self.conf_getFloat(section, 'sleep_time_max', 0) self._simple_fetch = self.conf_getInt(section, 'simple_fetch', 0, quiet=True) self.log_to = self.conf_get(section, 'log_to', 'console file') self.log_path = self.conf_get(section, 'log_path', 'funkload.log') self.result_path = os.path.abspath( self.conf_get(section, 'result_path', 'funkload.xml')) # init loggers if self.in_bench_mode: level = logging.INFO else: level = logging.DEBUG self.logger = get_default_logger(self.log_to, self.log_path, level=level) self.logger_result = get_default_logger(log_to="xml", log_path=self.result_path, name="FunkLoadResult") #self.logd('_funkload_init config [%s], log_to [%s],' # ' log_path [%s], result [%s].' % ( # self._config_path, self.log_to, self.log_path, self.result_path)) # init webunit browser (passing a fake methodName) self._browser = WebTestCase(methodName='log') self.clearContext()
def _funkload_init(self):
    """Set up the test case from its configuration file.

    Finds the configuration file, loads it, then initializes ok
    codes, sleep times, fetch mode, the loggers and the webunit
    browser used to issue requests.
    """
    # Resolve the configuration file: the --config option wins,
    # otherwise <ClassName>.conf under $FL_CONF_PATH (default '.').
    conf_file = getattr(self._options, 'config', None)
    if not conf_file:
        conf_dir = os.getenv('FL_CONF_PATH', '.')
        conf_file = os.path.join(conf_dir,
                                 self.__class__.__name__ + '.conf')
    conf_file = os.path.abspath(os.path.expanduser(conf_file))
    if not os.path.exists(conf_file):
        conf_file = "Missing: " + conf_file
    parser = ConfigParser()
    parser.read(conf_file)
    self._config = parser
    self._config_path = conf_file
    self.conf = ConfSectionFinder(self)
    self.default_user_agent = self.conf_get(
        'main', 'user_agent', 'FunkLoad/%s' % get_version(), quiet=True)
    # Options come from [bench] or [ftest] depending on the run mode.
    section = 'bench' if self.in_bench_mode else 'ftest'
    self.setOkCodes(self.conf_getList(
        section, 'ok_codes', [200, 301, 302, 303, 307], quiet=True))
    self.sleep_time_min = self.conf_getFloat(section, 'sleep_time_min', 0)
    self.sleep_time_max = self.conf_getFloat(section, 'sleep_time_max', 0)
    self._simple_fetch = self.conf_getInt(section, 'simple_fetch', 0,
                                          quiet=True)
    self.log_to = self.conf_get(section, 'log_to', 'console file')
    self.log_path = self.conf_get(section, 'log_path', 'funkload.log')
    self.result_path = os.path.abspath(
        self.conf_get(section, 'result_path', 'funkload.xml'))
    # Loggers: benches stay at INFO, single test runs log at DEBUG.
    level = logging.INFO if self.in_bench_mode else logging.DEBUG
    self.logger = get_default_logger(self.log_to, self.log_path,
                                     level=level)
    self.logger_result = get_default_logger(log_to="xml",
                                            log_path=self.result_path,
                                            name="FunkLoadResult")
    # Webunit browser (passing a fake methodName).
    self._browser = WebTestCase(methodName='log')
    self.clearContext()
def analyze_posting_lists():
    """Build posting lists under several encodings and log their sizes.

    Creates posting lists with 128 and 16 coding, decodes them to plain
    ordered lists, and logs the total byte counts for the 128, 16,
    int32 and gamma-coding representations to
    output/posting_lists_sizes.txt (via the default logger).
    """
    logger = utils.get_default_logger()
    logger.addHandler(logging.FileHandler(
        filename="output/posting_lists_sizes.txt", mode="w"))

    logger.info("Creating posting lists with 128 coding...")
    lists_128 = _create_terms_posting_lists(coding=128)
    logger.info("Creating posting lists with 16 coding...")
    lists_16 = _create_terms_posting_lists(coding=16)

    # Decoded form of the 128-coded lists, used for the uncompressed
    # (int32) and gamma-coding size baselines.
    decoded = [pl.decode_to_ordered_list() for pl in lists_128]

    size_128 = _count_bytes_in_postings_lists(lists_128)
    size_16 = _count_bytes_in_postings_lists(lists_16)
    size_int32 = _count_int32_bytes_in_ordered_lists(decoded)
    size_gamma = _count_bytes_in_gamma_coding_lists(decoded)

    logger.info(f"Bytes in 128 coded posting lists: {size_128}")
    logger.info(f"Bytes in 16 coded posting lists: {size_16}")
    logger.info(f"Bytes in int32 lists: {size_int32}")
    logger.info(f"Bytes in gamma coding lists: {size_gamma}")
def run_article_finder(input_arguments):
    """Answer article-finding queries read interactively from stdin.

    Loads the wiki articles, their embeddings and a trigram index once,
    then processes each input line (lowercased) as a query until EOF.

    :param input_arguments: parsed command-line arguments, forwarded to
        the embedding loader and the query processor.
    """
    logger = utils.get_default_logger()
    logger.info("Reading wiki articles...")
    wiki_articles = articles.read_wiki_articles()
    article_embeddings = get_article_embeddings(input_arguments,
                                                wiki_articles, logger)
    logger.info("Creating n-grams...")
    articles_by_id = {article.id: article for article in wiki_articles}
    trigram_index = create_trigrams_article_ids(wiki_articles)
    logger.info("All data prepared successfully...")
    while True:
        try:
            query = input().lower()
            process_query(query, input_arguments, article_embeddings,
                          articles_by_id, trigram_index)
        except EOFError:
            # stdin exhausted: stop the interactive loop.
            print("All input processed")
            break
def run_index_creator(index_type, use_terms_clusters):
    """Build the search index and persist it to the index storage.

    Reads the wiki articles, builds the index, then saves the
    articles, the index data and the words' base forms into a freshly
    truncated IndexStorage.

    :param index_type: kind of index to build, forwarded to the index
        builder and the storage.
    :param use_terms_clusters: whether term clustering is used.
    """
    logger = utils.get_default_logger()
    logger.info("Creating index storage...")
    logger.info("Reading wiki articles...")
    wiki_articles = utils.read_wiki_articles()
    logger.info("Creating index...")
    index_data = _create_index(index_type, wiki_articles,
                               use_terms_clusters)
    with IndexStorage(index_type, use_terms_clusters,
                      truncate_old=True) as storage:
        logger.info("Saving wiki articles to index storage")
        for article in wiki_articles:
            storage.add_wiki_article(article)
        _save_index_data(index_data, storage, logger)
        logger.info("Saving words base forms to index storage")
        for word, base_forms in utils.read_words_base_forms().items():
            storage.add_word_base_forms(word, base_forms)
    logger.info("Index created successfully")
import os import sys import json import utils import tempfile import numpy as np import tensorflow as tf import tensorflow.contrib.slim as slim config = utils.load_config() sys.path.append(os.path.expanduser(config['slim_path'])) resnet = utils.load_module('nets.resnet_v1') logger = utils.get_default_logger() class WildCat: def __init__(self, images, labels=None, n_classes=None, training=False, transfer_conv_size=(3, 3), n_maps_per_class=5, alpha=1.0, k=1, reg=None): self.images = images self.labels = labels self.n_classes = n_classes self.training = training self.transfer_conv_size = transfer_conv_size