def test01_no_destination_no_connection(self):
    """No destination mapped, then a destination without a reachable connection."""
    Config().__set_prop__(Config.key_use_netloc, "False")
    DestinationMap().__remove_destination__("http://bla.com")
    des.reporter.reset_instance()
    logger.debug("\n============ no destination =============\n")
    # Processing must bail out immediately when no destination is known.
    syncer = Relisync("http://bla.com")
    self.assertEqual(Status.init, syncer.status)
    syncer.process_source()
    self.assertEqual(1, len(syncer.exceptions))
    self.assertEqual("No destination for http://bla.com", syncer.exceptions[0])
    self.assertEqual(Status.processed_with_exceptions, syncer.status)
    # With a destination but no reachable server, the error is caught and reported.
    DestinationMap().__set_destination__("http://bla.com", "destination_x")
    logger.debug("\n============destination, no connection =============\n")
    syncer = Relisync("http://bla.com")
    syncer.process_source()
    self.assertEqual(1, len(syncer.exceptions))
    self.assertEqual(Status.processed_with_exceptions, syncer.status)
    report = des.reporter.instance()
    self.assertEqual(2, len(report.sync_status))
    self.assertIsNotNone(report.sync_status[0].exception)
    # Fall back on the net location 'bla.com' as destination; still no connection.
    Config().__set_prop__(Config.key_use_netloc, "True")
    DestinationMap().__remove_destination__("http://bla.com")
    logger.debug("\n=========== using netloc, still no connection ==============\n")
    syncer = Relisync("http://bla.com")
    syncer.process_source()
    self.assertEqual(1, len(syncer.exceptions))
    self.assertEqual(Status.processed_with_exceptions, syncer.status)
    self.assertEqual(3, len(report.sync_status))
    self.assertIsNotNone(report.sync_status[1].exception)
def test_02_change(self):
    """Baseline create followed by an incremental change sync."""
    Config().__set_prop__(Config.key_use_netloc, "False")
    Config().__set_prop__(Config.key_audit_only, "False")
    DestinationMap().__set_destination__("http://localhost:8000/rs/source/s1", "rs/destination/d1")
    __clear_destination__("d1")
    __clear_sources_xml__("s1")
    __create_resourcelist__("s1")
    logger.debug("\n=========== create ==============\n")
    rl_sync = Relisync("http://localhost:8000/rs/source/s1/resourcelist.xml")
    rl_sync.process_source()
    self.assertEqual(0, len(rl_sync.exceptions))
    self.assertEqual(Status.processed, rl_sync.status)
    # Change one resource and publish a changelist for it.
    __change_resource__("s1", "resource1.txt")
    __create_changelist__("s1")
    logger.debug("\n=========== change ==============\n")
    cl_sync = Chanlisync("http://localhost:8000/rs/source/s1/changelist.xml")
    cl_sync.process_source()
    self.assertEqual(0, len(cl_sync.exceptions))
    self.assertEqual(Status.processed, cl_sync.status)
    report = des.reporter.instance()
    self.assertEqual(4, len(report.sync_status))
    # NOTE: 'same' is reported as None for the change run, not as a count.
    self.assertIsNone(report.sync_status[3].same)
    self.assertEqual(0, report.sync_status[3].created)
    self.assertEqual(1, report.sync_status[3].updated)
    self.assertEqual(0, report.sync_status[3].deleted)
    self.assertEqual(0, report.sync_status[3].to_delete)
    self.assertIsNone(report.sync_status[3].exception)
    report.sync_status_to_file("logs/incremental-change.csv")
def test_inject_dependencies(self):
    """Listener classes named in the config are instantiated and registered."""
    Config.__set_config_filename__("test-files/config.txt")
    listeners = "des.processor_listener.SitemapWriter, des.processor.ProcessorListener"
    Config().__set_prop__(Config.key_des_processor_listeners, listeners)
    runner = DesRunner()
    self.assertEqual(2, len(des.processor.processor_listeners))
def test07_process_source(self):
    """Read a source description over a connection and write the sitemap to file."""
    # Best-effort cleanup of a previous run. Fixed: the original used a bare
    # 'except:' which would also swallow KeyboardInterrupt/SystemExit; only
    # file-system errors from rmtree should be ignored here.
    try:
        shutil.rmtree("rs/destination/d6/sitemaps")
    except OSError:
        pass
    Config.__set_config_filename__("test-files/config.txt")
    Config().__drop__()
    DestinationMap.__set_map_filename__("test-files/desmap.txt")
    DestinationMap().__drop__()
    des.reporter.reset_instance()
    Config().__set_prop__(Config.key_use_netloc, "False")
    Config().__set_prop__(Config.key_audit_only, "False")
    DestinationMap().__set_destination__("http://localhost:8000/rs/source/s6", "rs/destination/d6")
    des.processor.processor_listeners.append(SitemapWriter())
    base_uri = "http://localhost:8000/rs/source/s6/"
    sdproc = Sodesproc(base_uri)
    sdproc.read_source()
    self.assertEqual(200, sdproc.source_status)
    self.assertEqual(Status.document, sdproc.status)
    self.assertTrue(os.path.isfile("rs/destination/d6/sitemaps/.well-known/resourcesync"))
def __init__(self, config_filename="conf/config.txt"):
    '''
    Create a Runner using the configuration file denoted by config_filename.

    :param config_filename: path of the application configuration file
    :return: None
    :raises FileNotFoundError: if the configuration file or the logging
        configuration file it points to cannot be found
    '''
    try:
        Config.__set_config_filename__(config_filename)
        config = Config()
    except FileNotFoundError as err:
        # Logging is not configured at this point, so report on stdout before re-raising.
        print(err)
        raise err
    logging_configuration_file = config.prop(Config.key_logging_configuration_file, "conf/logging.conf")
    # logging.config.fileConfig raises "KeyError: 'formatters'" if the configuration file does not exist.
    # A FileNotFoundError in this case is less confusing.
    if not os.path.isfile(logging_configuration_file):
        # It seems there is no default logging configuration to the console in Python?
        # In that case we'll call it a day.
        raise FileNotFoundError("Logging configuration file not found: " + logging_configuration_file)
    logging.config.fileConfig(logging_configuration_file)
    self.logger = logging.getLogger(__name__)
    self.pid = os.getpid()
    self.sources = None  # document with source urls; not read until run()
    self.exceptions = []
    self.logger.info("Started %s with pid %d" % (__file__, self.pid))
    self.logger.info("Configured %s from '%s'" % (self.__class__.__name__, config_filename))
    self.logger.info("Configured logging from '%s'" % logging_configuration_file)
    self.__inject_dependencies__(config)
def test04_process_baseline_netloc(self):
    """Baseline sync that falls back on the net location as destination."""
    Config().__set_prop__(Config.key_use_netloc, "True")
    Config().__set_prop__(Config.key_audit_only, "False")
    DestinationMap().__remove_destination__("http://localhost:8000/rs/source/s1")
    __clear_sources_xml__("s1")
    __create_resourcelist__("s1")
    if os.path.isdir("localhost:8000"):
        logger.debug("Expecting only audit")
        expected_sync_status_count = 1
    else:
        logger.debug("Expecting update")
        expected_sync_status_count = 2
    logger.debug("\n=========================\n")
    rl_sync = Relisync("http://localhost:8000/rs/source/s1/resourcelist.xml")
    rl_sync.process_source()
    self.assertEqual(0, len(rl_sync.exceptions))
    self.assertEqual(Status.processed, rl_sync.status)
    report = des.reporter.instance()
    # The count depends on whether the test runs individually or in a group:
    # self.assertEqual(expected_sync_status_count, len(report.sync_status))
    report.sync_status_to_file("logs/baseline-netloc.csv")
def test02_new(self):
    """A fresh Config reads its properties from the configured file name."""
    Config.__set_config_filename__("test-files/config.txt")
    cfg = Config()
    self.assertEqual("test-files/config.txt", cfg._config_filename)
    self.assertEqual("test-files/config.txt", Config.__get_config_filename__())
    self.assertEqual("logging.conf", cfg.prop(Config.key_logging_configuration_file))
    self.assertEqual("test-files/desmap.txt", cfg.prop(Config.key_location_mapper_destination_file))
def test04_list_prop(self):
    """list_prop splits a comma-separated property into its items."""
    Config.__set_config_filename__("test-files/config.txt")
    config = Config()
    # Renamed from 'list' to 'items': the original shadowed the builtin 'list'.
    items = config.list_prop("test_list")
    self.assertEqual(3, len(items))
    self.assertEqual("foo.bar", items[0])
    self.assertEqual("bar.foo", items[1])
    self.assertEqual("foo.bar.baz", items[2])
def test_inject_dependencies(self):
    """Listener classes named in the config are instantiated and registered."""
    Config.__set_config_filename__("test-files/config.txt")
    listeners = "des.processor_listener.SitemapWriter, des.processor.ProcessorListener"
    Config().__set_prop__(Config.key_des_processor_listeners, listeners)
    runner = DesRunner()
    self.assertEqual(2, len(des.processor.processor_listeners))
def test_03_change_delete(self):
    """Incremental sync: update + delete, then a second run with no changes."""
    Config().__set_prop__(Config.key_use_netloc, "False")
    Config().__set_prop__(Config.key_audit_only, "False")
    DestinationMap().__set_destination__("http://localhost:8000/rs/source/s2", "rs/destination/d2")
    __clear_destination__("d2")
    __clear_sources_xml__("s2")
    __add_resource__("s2", "added.txt")
    __create_resourcelist__("s2")
    logger.debug("\n=========== create ==============\n")
    rl_sync = Relisync("http://localhost:8000/rs/source/s2/resourcelist.xml")
    rl_sync.process_source()
    self.assertEqual(0, len(rl_sync.exceptions))
    self.assertEqual(Status.processed, rl_sync.status)
    # One resource changed, one deleted; publish the changelist.
    __change_resource__("s2", "resource2.txt")
    __delete_resource__("s2", "added.txt")
    __create_changelist__("s2")
    des.reporter.reset_instance()
    # time.sleep(5)
    logger.debug("\n=========== update + delete ==============\n")
    cl_sync = Chanlisync("http://localhost:8000/rs/source/s2/changelist.xml")
    cl_sync.process_source()
    self.assertEqual(0, len(cl_sync.exceptions))
    self.assertEqual(Status.processed, cl_sync.status)
    report = des.reporter.instance()
    report.sync_status_to_file("logs/incremental-change-delete.csv")
    self.assertEqual(2, len(report.sync_status))
    self.assertIsNone(report.sync_status[1].same)
    self.assertEqual(0, report.sync_status[1].created)
    self.assertEqual(1, report.sync_status[1].updated)
    self.assertEqual(1, report.sync_status[1].deleted)
    self.assertEqual(1, report.sync_status[1].to_delete)
    self.assertIsNone(report.sync_status[1].exception)
    des.reporter.reset_instance()
    logger.debug("\n=========== no change ==============\n")
    cl_sync = Chanlisync("http://localhost:8000/rs/source/s2/changelist.xml")
    cl_sync.process_source()
    self.assertEqual(0, len(cl_sync.exceptions))
    self.assertEqual(Status.processed, cl_sync.status)
    report = des.reporter.instance()
    self.assertEqual(1, len(report.sync_status))
    self.assertIsNone(report.sync_status[0].same)
    self.assertEqual(0, report.sync_status[0].created)
    self.assertEqual(0, report.sync_status[0].updated)
    self.assertEqual(0, report.sync_status[0].deleted)
    self.assertEqual(0, report.sync_status[0].to_delete)
    self.assertIsNone(report.sync_status[0].exception)
def event_sitemap_received(self, uri, capability, text):
    """
    Save a received sitemap document under the local sitemap folder.

    :param uri: the uri the sitemap was retrieved from
    :param capability: the capability of the sitemap document
    :param text: the sitemap text to save
    """
    config = Config()
    netloc = config.boolean_prop(Config.key_use_netloc, False)
    # Fixed typo 'baser_uri' -> 'base_uri' (the value is unused here).
    base_uri, local_path = DestinationMap().find_local_path(uri, netloc=netloc, infix=SITEMAP_FOLDER)
    if local_path is not None:
        os.makedirs(os.path.dirname(local_path), exist_ok=True)
        with open(local_path, "w") as file:
            file.write(text)
        self.logger.debug("Saved %s '%s'" % (capability, local_path))
    else:
        # Logger.warn is deprecated; Logger.warning is the supported spelling.
        self.logger.warning("Could not save %s. No local path for %s" % (capability, uri))
def test02_new(self):
    """A fresh Config reads its properties from the configured file name."""
    Config.__set_config_filename__("test-files/config.txt")
    cfg = Config()
    self.assertEqual("test-files/config.txt", cfg._config_filename)
    self.assertEqual("test-files/config.txt", Config.__get_config_filename__())
    self.assertEqual("logging.conf", cfg.prop(Config.key_logging_configuration_file))
    self.assertEqual("test-files/desmap.txt", cfg.prop(Config.key_location_mapper_destination_file))
def set_config(self, config_file, section):
    """
    Set config file and section.

    :param config_file: file name of the config file
    :param section: the section within the config file
    """
    self.config_file = config_file
    self.section = section
    # Instantiate the configuration manager for the given file and section.
    self.config = Config(self.config_file, self.section)
def process_source(self):
    """Find the destination for self.uri and synchronize, recording any failure."""
    config = Config()
    use_netloc = config.boolean_prop(Config.key_use_netloc, False)
    base_uri, destination = DestinationMap().find_destination(self.uri, netloc=use_netloc, infix="resources")
    if destination is None:
        msg = "No destination for %s" % self.uri
        self.logger.debug(msg)
        self.exceptions.append(msg)
        des.reporter.instance().log_status(self.uri, exception="No destination specified and use of net location prohibited.")
    else:
        self.__synchronize__(destination)
    self.status = Status.processed_with_exceptions if self.has_exceptions() else Status.processed
def event_sitemap_received(self, uri, capability, text):
    """
    Save a received sitemap document under the local sitemap folder.

    :param uri: the uri the sitemap was retrieved from
    :param capability: the capability of the sitemap document
    :param text: the sitemap text to save
    """
    config = Config()
    netloc = config.boolean_prop(Config.key_use_netloc, False)
    # Fixed typo 'baser_uri' -> 'base_uri' (the value is unused here).
    base_uri, local_path = DestinationMap().find_local_path(uri, netloc=netloc, infix=SITEMAP_FOLDER)
    if local_path is not None:
        os.makedirs(os.path.dirname(local_path), exist_ok=True)
        with open(local_path, "w") as file:
            file.write(text)
        self.logger.debug("Saved %s '%s'" % (capability, local_path))
    else:
        # Logger.warn is deprecated; Logger.warning is the supported spelling.
        self.logger.warning("Could not save %s. No local path for %s" % (capability, uri))
def test03_boolean_prop(self):
    """boolean_prop round-trips str(True)/str(False) and honours defaults."""
    Config.__set_config_filename__("test-files/config.txt")
    cfg = Config()
    self.assertFalse(cfg.boolean_prop(Config.key_use_netloc))
    cfg.__set_prop__(Config.key_use_netloc, str(True))
    self.assertTrue(cfg.boolean_prop(Config.key_use_netloc))
    cfg.__set_prop__(Config.key_use_netloc, str(False))
    self.assertFalse(cfg.boolean_prop(Config.key_use_netloc))
    # A missing key falls back on the given default.
    self.assertTrue(cfg.boolean_prop("no_key", True))
    self.assertFalse(cfg.boolean_prop("no_key", False))
def setUpModule():
    """Start a local http server and prime config and destination map for the tests."""
    global server
    server = HTTPServer(('', 8000), SimpleHTTPRequestHandler)
    t = threading.Thread(target=server.serve_forever)
    t.daemon = True
    logger.debug("Starting server at http://localhost:8000/")
    t.start()
    proc.processor_listeners.append(SitemapWriter())
    Config.__set_config_filename__("test-files/config.txt")
    Config().__drop__()
    DestinationMap.__set_map_filename__("test-files/desmap.txt")
    DestinationMap().__drop__()
    DestinationMap().__set_destination__("http://localhost:8000/rs/source/discover/", "rs/destination/discover")
def setUpModule():
    """Start a local http server and prime config and destination map for the tests."""
    global server
    server = HTTPServer(('', 8000), SimpleHTTPRequestHandler)
    t = threading.Thread(target=server.serve_forever)
    t.daemon = True
    logger.debug("Starting server at http://localhost:8000/")
    t.start()
    proc.processor_listeners.append(SitemapWriter())
    Config.__set_config_filename__("test-files/config.txt")
    Config().__drop__()
    DestinationMap.__set_map_filename__("test-files/desmap.txt")
    DestinationMap().__drop__()
    DestinationMap().__set_destination__("http://localhost:8000/rs/source/discover/", "rs/destination/discover")
def run(self, sources, task="discover", once=False):
    """
    Run the DesRunner.

    A running application can be stopped by creating a file named 'stop' in the
    directory the runner was started from.

    Source urls are read from the file given in param 'sources'. This file is
    read each time a full round of synchronizing has taken place, so source
    urls can be extended or changed without restarting the application.

    Sources are mapped to the destinations given in the file denoted by the
    configuration parameter "location_mapper_destination_file".

    :param sources: the file containing source urls
    :param task: the task to run.
        - If source urls can all be discovered by reading the
          .well-known/resourcesync on each source, use 'wellknown'.
        - If all source urls point to capability lists, use 'capability'.
        - If source urls are heterogeneous, use 'discover'.
    :param once: True for exploring source urls once and then exit, False otherwise
    :return: None
    """
    condition = True
    while condition:
        # list of urls
        self.logger.info("Reading source urls from '%s'" % sources)
        self.__read_sources_doc__(sources)
        # reset url --> destination map. New mappings may be configured
        DestinationMap.__set_map_filename__(Config().prop(Config.key_location_mapper_destination_file, "conf/desmap.txt"))
        # drop to force fresh read from file
        DestinationMap().__drop__()
        # Set the root of the destination folder if configured
        DestinationMap().set_root_folder(Config().prop(Config.key_destination_root))
        # do all the urls
        self.__do_task__(task)
        # report
        self.__do_report__(task)
        # to continue or not to continue
        condition = not (once or self.__stop__())
        if condition:
            pause = Config().int_prop(Config.key_sync_pause)
            self.logger.info("Going to sleep for %d seconds." % pause)
            self.logger.debug("zzzzzzzzzzzzzzzzzzzzzzzzzzzzzz")
            time.sleep(pause)
            # check the stop condition again after the pause
            condition = not (once or self.__stop__())
def test08_try_robots_with_netloc(self):
    """Discoverer yields a Reliproc for an unmapped uri when netloc is in use."""
    DestinationMap().__remove_destination__("http://localhost:8000/rs/source/discover/")
    Config().__set_prop__(Config.key_use_netloc, "True")
    uri = "http://localhost:8000/rs/source/discover/loc2"
    disco = Discoverer(uri)
    reli_processor = disco.get_processor()
    self.assertIsInstance(reli_processor, proc.Reliproc)
    reli_processor.read_source()
def __synchronize__(self, destination):
    """
    Synchronize self.uri to the given destination with the shared des client.

    :param destination: the local destination to synchronize to
    """
    config = Config()
    checksum = config.boolean_prop(Config.key_use_checksum, True)
    audit_only = config.boolean_prop(Config.key_audit_only, True)
    allow_deletion = not audit_only
    desclient = des.desclient.instance()
    try:
        desclient.set_mappings((self.uri, destination))
        self.do_synchronize(desclient, allow_deletion, audit_only)
    except ClientFatalError as err:
        # Logger.warn is deprecated; Logger.warning is the supported spelling.
        self.logger.warning("EXCEPTION while syncing %s" % self.uri, exc_info=True)
        desclient.log_status(exception=err)
        self.exceptions.append(err)
    finally:
        # A side effect (or a bug ;) is messing around with the class-level property
        # Client.checksum. Make sure it is always set to initial value before the
        # next source is processed.
        desclient.checksum = checksum
def test03_process_baseline(self):
    """Baseline sync: create on the first run, audit with no changes on the second."""
    Config().__set_prop__(Config.key_use_netloc, "False")
    Config().__set_prop__(Config.key_audit_only, "False")
    DestinationMap().__set_destination__("http://localhost:8000/rs/source/s1", "rs/destination/d1")
    __clear_destination__("d1")
    __clear_sources_xml__("s1")
    __create_resourcelist__("s1")
    des.reporter.reset_instance()
    logger.debug("\n=========== create ==============\n")
    rl_sync = Relisync("http://localhost:8000/rs/source/s1/resourcelist.xml")
    rl_sync.process_source()
    self.assertEqual(0, len(rl_sync.exceptions))
    self.assertEqual(Status.processed, rl_sync.status)
    report = des.reporter.instance()
    # sync_status count: 1 for audit, 1 for create. expected 2
    # print(report.sync_status_to_string())
    self.assertEqual(2, len(report.sync_status))
    self.assertEqual(0, report.sync_status[0].same)
    self.assertEqual(3, report.sync_status[0].created)
    self.assertEqual(0, report.sync_status[0].updated)
    self.assertEqual(0, report.sync_status[0].deleted)
    self.assertEqual(0, report.sync_status[0].to_delete)
    self.assertIsNone(report.sync_status[0].exception)
    # report.sync_status_to_file("logs/baseline.csv")
    logger.debug("\n============ update =============\n")
    rl_sync = Relisync("http://localhost:8000/rs/source/s1/resourcelist.xml")
    rl_sync.process_source()
    self.assertEqual(0, len(rl_sync.exceptions))
    self.assertEqual(Status.processed, rl_sync.status)
    report = des.reporter.instance()
    # sync_status count: 1 for audit, 1 for create (both from previous run),
    # 1 for audit, no update. expected 3
    self.assertEqual(3, len(report.sync_status))
    self.assertEqual(3, report.sync_status[2].same)
    self.assertEqual(0, report.sync_status[2].created)
    self.assertEqual(0, report.sync_status[2].updated)
    self.assertEqual(0, report.sync_status[2].deleted)
    self.assertEqual(0, report.sync_status[2].to_delete)
    self.assertIsNone(report.sync_status[2].exception)
def test02_process_audit(self):
    """An audit-only run produces exactly one sync status entry."""
    Config().__set_prop__(Config.key_use_netloc, "False")
    Config().__set_prop__(Config.key_audit_only, "True")
    DestinationMap().__set_destination__("http://localhost:8000/rs/source/s1", "rs/destination/d1")
    __clear_destination__("d1")
    __clear_sources_xml__("s1")
    __create_resourcelist__("s1")
    des.reporter.reset_instance()
    logger.debug("\n=========================\n")
    rl_sync = Relisync("http://localhost:8000/rs/source/s1/resourcelist.xml")
    rl_sync.process_source()
    self.assertEqual(0, len(rl_sync.exceptions))
    self.assertEqual(Status.processed, rl_sync.status)
    report = des.reporter.instance()
    self.assertEqual(1, len(report.sync_status))
    report.sync_status_to_file("logs/audit.csv")
def test07_process_source(self):
    """Read a source description over a connection and write the sitemap to file."""
    # Best-effort cleanup of a previous run. Fixed: the original used a bare
    # 'except:' which would also swallow KeyboardInterrupt/SystemExit; only
    # file-system errors from rmtree should be ignored here.
    try:
        shutil.rmtree("rs/destination/d6/sitemaps")
    except OSError:
        pass
    Config.__set_config_filename__("test-files/config.txt")
    Config().__drop__()
    DestinationMap.__set_map_filename__("test-files/desmap.txt")
    DestinationMap().__drop__()
    des.reporter.reset_instance()
    Config().__set_prop__(Config.key_use_netloc, "False")
    Config().__set_prop__(Config.key_audit_only, "False")
    DestinationMap().__set_destination__("http://localhost:8000/rs/source/s6", "rs/destination/d6")
    des.processor.processor_listeners.append(SitemapWriter())
    base_uri = "http://localhost:8000/rs/source/s6/"
    sdproc = Sodesproc(base_uri)
    sdproc.read_source()
    self.assertEqual(200, sdproc.source_status)
    self.assertEqual(Status.document, sdproc.status)
    self.assertTrue(os.path.isfile("rs/destination/d6/sitemaps/.well-known/resourcesync"))
def sync_status_to_file(self, filename=None):
    """
    Write all collected sync statuses as csv lines to the given file.

    :param filename: the file to write to; defaults to the configured
        sync status report file or 'sync-status.csv'
    """
    if filename is None:
        filename = Config().prop(Config.key_sync_status_report_file, "sync-status.csv")
    # Renamed 'file' -> 'f' to avoid shadowing the builtin; the redundant
    # explicit close() was removed because 'with' already closes the file.
    with open(filename, 'w') as f:
        f.write("%s\n" % "date,uri,in_sync,incremental,audit,same,created,updated,deleted,to_delete,exception,origin")
        for item in self.sync_status:
            f.write("%s\n" % item)
    self.logger.info("Wrote %d source statuses to audit file %s" % (len(self.sync_status), filename))
def instance():
    """
    resync.Client is a somewhat heavy class. Desclient inherits and is adapted
    to be used during one run of resyncing several sources. For convenience:
    grab the one instance from here.

    :return: an instance of Desclient
    """
    global _instance
    log = logging.getLogger(__name__)
    if _instance is None:
        config = Config()
        # Constructor parameters of the resync Client.
        checksum = config.boolean_prop(Config.key_use_checksum, True)
        verbose = False
        # Parameters of client.baseline_or_audit.
        audit_only = config.boolean_prop(Config.key_audit_only, True)
        dryrun = audit_only
        _instance = DesClient(checksum, verbose, dryrun)
        log.debug("Created a new %s [checksum=%s, verbose=%s, dryrun=%s]"
                  % (_instance.__class__.__name__, checksum, verbose, dryrun))
    return _instance
def setUp(self):
    """Point Config at the test configuration and drop any cached instance."""
    Config.__set_config_filename__("test-files/config.txt")
    Config().__drop__()
def test01_new(self):
    """Constructing Config with a missing file raises FileNotFoundError."""
    Config.__set_config_filename__("test-files/no-config.txt")
    with self.assertRaises(FileNotFoundError):
        Config()
def test04__drop__(self):
    """__drop__ discards the singleton so a newly set file name takes effect."""
    Config.__set_config_filename__("test-files/config.txt")
    first = Config()
    self.assertIsNotNone(first)
    self.assertIsNone(first.prop("this_is"))
    first.__drop__()
    self.assertIsNone(Config.__instance__)
    Config.__set_config_filename__("test-files/alt-config.txt")
    second = Config()
    self.assertIsNotNone(second)
    self.assertNotEqual(first, second)
    self.assertEqual("a_test", second.prop("this_is"))
    second.__drop__()
def setUp(self):
    """Reset config, destination map and des client before each test."""
    Config.__set_config_filename__("test-files/config.txt")
    Config().__drop__()
    DestinationMap.__set_map_filename__("test-files/desmap.txt")
    DestinationMap().__drop__()
    des.desclient.reset_instance()
def setUp(self):
    """Prime config, register a sitemap writer and map the test source."""
    Config.__set_config_filename__("test-files/config.txt")
    Config().__drop__()
    des.dump.dump_listeners.append(SitemapWriter())
    DestinationMap().__set_destination__("http://localhost:8000/rs/source", "rs/destination/d7")
def setUpClass(cls):
    """Point Config at the test configuration for the whole test class."""
    Config.__set_config_filename__("test-files/config.txt")
class Sim:
    """
    Main simulator class.

    (Original bilingual Spanish/English comments translated to English.)
    """

    # name of the section in the configuration file that includes all simulation parameters
    PAR_SECTION = "Simulacion"
    # simulation duration parameter
    PAR_DURATION = "duracion"
    # smallest amount of time the system can detect
    PAR_TIEMPOMINIMO = "tiempo-minimo"
    # .csv file with information of the UE
    PAR_UE = "dispositivos"
    # .csv file with CMMPP events
    PAR_EVENTOS = "eventos"
    # path loss exponent
    PLExp = "PLE"
    # subcarrier bandwidth
    BW_subportadoraNBIoT = "BW-subportadoraNBIoT"
    # Pmax for URLLC devices
    PURLLC = "Pmax-URLLC"
    # Pmax for mMTC devices
    PmMTC = "Pmax-mMTC"
    # cluster max size
    k_max = "kmax"
    # cell radius
    CELL_RADIO = "radio"

    def __init__(self):
        """Initialize current time to 0 and the queues/lists to empty."""
        # current simulation time
        self.time = 0
        # queue of events, implemented as a heap
        self.queue = []
        # list of nodes, UE's and eNB
        self.nodes = []
        # list of events imported from the .csv file
        self.eventos = []
        # registers all events, not only the ones that reach the log
        self.eventosaux = []
        # list of UE's to be evaluated in the next NOMA computing
        self.universoNOMA = []
        # denial-of-service blockings (no cluster)
        self.bloqueoSinCluster = []
        # initialize() should be called before running the simulation
        self.initialized = False
        # config file and section; set through set_config()
        self.config_file = ""
        self.section = ""

    def set_config(self, config_file, section):
        """
        Set config file and section.

        :param config_file: file name of the config file
        :param section: the section within the config file
        """
        self.config_file = config_file
        self.section = section
        # instantiate config manager
        self.config = Config(self.config_file, self.section)

    def initialize(self, run_number):
        """
        Simulation initialization method.

        :param run_number: the index of the simulation to be run
        """
        if self.config_file == "" or self.section == "":
            print("Configuration error. Call set_config() before initialize()")
            sys.exit(1)
        # set and check run number
        self.run_number = run_number
        if run_number >= self.config.get_runs_count():
            print("Simulation error. Run number %d does not exist. Please run "
                  "the simulator with the --list option to list all possible "
                  "runs" % run_number)
            sys.exit(1)
        self.config.set_run_number(run_number)
        # instantiate data logger
        self.logger = Log(self.config.get_output_file())
        # get simulation duration
        self.duration = self.config.get_param(self.PAR_DURATION)
        # minimum time registered by the simulator
        self.tiempoMinimo = self.config.get_param(self.PAR_TIEMPOMINIMO)
        # cell radius
        self.radio_cell = self.config.get_param(self.CELL_RADIO)
        # path loss exponent
        self.PLE = self.config.get_param(self.PLExp)
        # subcarrier bandwidth
        self.bwSubportNBIoT = self.config.get_param(self.BW_subportadoraNBIoT)
        # thermal noise power
        self.potenciaRuidoTermico = 5.012e-21
        # max power of URLLC devices
        self.pmaxURLLC = self.config.get_param(self.PURLLC)
        # max power of mMTC devices
        self.pmaxmMTC = self.config.get_param(self.PmMTC)
        # cluster max size
        self.kmax = self.config.get_param(self.k_max)
        # d0 for the NOMA algorithm
        self.d0 = 1000
        # instantiate the channel
        self.channel = Channel(self.config)
        # name of the file with the events; row format:
        # [idalarma, tiempo, iddispositivo, tipodispositivo, tipoevento, tampaquete, modelotrafico]
        self.eventosArchivo = self.config.get_param(self.PAR_EVENTOS)
        eventos_rec = pd.read_csv(self.eventosArchivo, index_col=0)
        self.eventos = eventos_rec.values.tolist()
        # name of the file containing the UE's; row format:
        # [iddispositivo, tipodispositivo, posx, posy]
        self.dispositivos = self.config.get_param(self.PAR_UE)
        dispositivos_rec = pd.read_csv(self.dispositivos, index_col=0)
        self.dispositivosLista = dispositivos_rec.values.tolist()
        # instantiate all the nodes, starting with the eNB as node 0
        self.node_eNB = Node(0, 'eNB', self.config, self.channel, 0, 0)
        # let the channel know about this node
        # self.channel.register_node(self.node_eNB)
        self.node_eNB.initialize_eNB()
        self.nodes.append(self.node_eNB)
        # UE's are created, initialized and added to the nodes list
        for d in self.dispositivosLista:
            node = Node(d[0], d[1], self.config, self.channel, d[2], d[3])
            node.initialize()
            self.nodes.append(node)
        # all done, simulation can start now
        self.initialized = True

    def run(self):
        """Runs the simulation."""
        # first check that everything is ready
        if not self.initialized:
            print("Cannot run the simulation. Call initialize() first")
            sys.exit(1)
        # save the time at which the simulation started, for statistical purpose
        start_time = time.time()
        # last time we printed the simulation percentage
        prev_time = start_time
        # print percentage for the first time (0%)
        self.print_percentage(True)
        # main simulation loop
        while self.time <= self.duration:
            # get next event and call the handle method of the destination
            event = self.next_event()
            dst = event.get_destination()
            src = event.get_source()
            dst.handle_event(event, src)
            # if more than a second has elapsed, update the percentage bar
            curr_time = time.time()
            if curr_time - prev_time >= 1:
                self.print_percentage(False)
                prev_time = curr_time
        # simulation completed, print the percentage for the last time (100%)
        self.print_percentage(False)
        # compute how much time the simulation took
        end_time = time.time()
        total_time = round(end_time - start_time)
        print("\nTiempo máximo de simulación alcanzado. Terminando.")
        print("Tiempo total de la simulación: %d horas, %d minutos, %d segundos" %
              (total_time // 3600, total_time % 3600 // 60, total_time % 3600 % 60))
        self.logger.log_file.close()

    def schedule_event(self, event):
        """
        Adds a new event to the queue of events.

        :param event: the event to schedule
        """
        if event.get_time() < self.time:
            # BUG FIX: the original passed the values as a second positional
            # argument to print() instead of %-formatting the message, and
            # referenced event.get_source without calling it (AttributeError).
            print("Schedule error: Module with id %d of type %s is trying to "
                  "schedule an event in the past. Current time = %f, schedule "
                  "time = %f" % (event.get_source().get_id(),
                                 event.get_source().get_type(),
                                 self.time, event.get_time()))
            sys.exit(1)
        heapq.heappush(self.queue, event)

    def next_event(self):
        """Returns the first event in the queue."""
        try:
            event = heapq.heappop(self.queue)
            self.time = event.event_time
            return event
        except IndexError:
            print("\n Sin más eventos en la pila. Terminando.")
            sys.exit(0)

    def cancel_event(self, event):
        """
        Deletes a scheduled event from the queue.

        :param event: the event to be canceled
        """
        try:
            self.queue.remove(event)
            # restore the heap invariant after the arbitrary removal
            heapq.heapify(self.queue)
        except ValueError:
            print("Trying to delete an event that does not exist.")
            sys.exit(1)

    def print_percentage(self, first):
        """Print or update the progress bar on stdout."""
        # go back to the beginning of the line
        if not first:
            sys.stdout.write('\r' + ERASE_LINE)
        # compute percentage
        perc = min(100, int(math.floor(self.time / self.duration * 100)))
        # print progress bar, percentage, and current element
        sys.stdout.write("[%-20s] %d%% (tiempo = %f, tiempo total = %f)" %
                         ('=' * (perc // 5), perc, self.time, self.duration))
        sys.stdout.flush()

    def get_runs_count(self):
        """
        Returns the number of runs for the given config file and section.

        :returns: the total number of runs
        """
        if self.config_file == "" or self.section == "":
            print("Configuration error. Call set_config() before "
                  "get_runs_count()")
            sys.exit(1)
        return self.config.get_runs_count()

    def get_logger(self):
        """Returns the data logger to modules."""
        return self.logger

    def get_time(self):
        """Returns current simulation time."""
        return self.time

    def get_params(self, run_number):
        """
        Returns a textual representation of simulation parameters for a given
        run number.

        :param run_number: the run number
        :returns: textual representation of parameters for run_number
        """
        return self.config.get_params(run_number)
def base_line(self, unzipdir):
    """
    Synchronize the unzipped contents of a resource dump with the local resources.

    NOTE(review): this method is deliberately unfinished — it raises
    NotImplementedError right after comparing the manifest with the local
    resource list, so everything after that raise is unreachable scaffolding.

    :param unzipdir: the directory of the unzipped packed contents.
    :return: None
    """
    manifest_file_name = os.path.join(unzipdir, "manifest.xml")
    try:
        sitemap = Sitemap()
        manifest_doc = sitemap.parse_xml(fh=manifest_file_name)
        # the manifest_doc is a resync.resource_container.ResourceContainer
        capability = manifest_doc.capability
        assert capability == CAPA_RESOURCEDUMP_MANIFEST, "Capability is not %s but %s" % (CAPA_RESOURCEDUMP_MANIFEST, capability)
        self.status = Status.parsed
        self.__inform_sitemap_received__(capability, manifest_file_name)
        config = Config()
        netloc = config.boolean_prop(Config.key_use_netloc, False)
        base_uri, destination = DestinationMap().find_destination(self.pack_uri, netloc=netloc)
        assert destination is not None, "Found no destination folder in DestinationMap"
        mapper = Mapper((base_uri, destination))
        rlb = ResourceListBuilder(mapper=mapper)
        dst_resource_list = rlb.from_disk()
        # Compares on uri
        same, updated, deleted, created = dst_resource_list.compare(manifest_doc)
        raise NotImplementedError("This class is not fully implemented.")
        # --- unreachable from here on; kept for future completion ---
        print(len(same), len(updated), len(deleted), len(created))
        print("same")
        for resource in same:
            print(resource)
        print("updated")
        for resource in updated:
            print(resource)
        print("deleted")
        for resource in deleted:
            print(resource)
        print("created")
        for resource in created:
            print(resource)
        base_uri, local_path = DestinationMap().find_local_path(resource.uri)
        print(base_uri, local_path)
    except AssertionError as err:
        self.logger.debug("%s Error: %s" % (self.pack_uri, str(err)))
        self.status = Status.parse_error
        self.exceptions.append(err)
    except SitemapParseError as err:
        self.logger.debug("%s Unreadable source: %s" % (self.source_uri, str(err)))
        self.status = Status.parse_error
        self.exceptions.append(err)
    self.status = Status.processed_with_exceptions if self.has_exceptions() else Status.processed