def createMetadataFile(self, version="0.2.2"):
    """
    Copied from test_metadata.

    Write a fake driver metadata YAML file under the driver directory and
    symlink it into place as the current DSA descriptor.

    @param version version string written into the metadata file
    """
    full_driver_path = "%s/%s/%s" % (Config().base_dir(), MI_BASE_DIR, DRIVER_DIR)
    if not exists(full_driver_path):
        os.makedirs(full_driver_path)

    md_file_path = "%s/%s" % (full_driver_path, METADATA_FILE)
    # 'with' guarantees the file is closed even if a write raises
    with open(md_file_path, 'w') as md_file:
        md_file.write("driver_metadata:\n")
        md_file.write(" author: Bill French\n")
        path_str = " driver_path: %s\n" % DRIVER_DIR
        md_file.write(path_str)
        md_file.write(" driver_name: fake_driver\n")
        md_file.write(" email: [email protected]\n")
        md_file.write(" release_notes: some note\n")
        version_str = " version: %s\n" % version
        md_file.write(version_str)
        constr_str = " constructor: %s\n" % CONSTRUCTOR
        md_file.write(constr_str)

    current_dsa_path = Config().idk_config_dir() + "/current_dsa.yml"
    log.info("linking %s to %s", md_file_path, current_dsa_path)

    # os.path.exists() follows symlinks and returns False for a broken link;
    # lexists() reports the link itself, so stale/broken links get removed too.
    if os.path.lexists(current_dsa_path):
        os.remove(current_dsa_path)

    os.symlink(md_file_path, current_dsa_path)
def __init__(self, rootdir=None):
    """
    @brief Open the local git working repository.
    @param rootdir repository root directory; defaults to the configured
                   working_repo from the IDK config.
    """
    if not rootdir:
        self.rootdir = Config().get("working_repo")
    else:
        self.rootdir = rootdir

    # gitpy hardcodes the git command without a path, so it's using PATH to
    # find it.  Prepend the configured git binary's directory to PATH.
    # (renamed from 'dir' to avoid shadowing the builtin)
    git_dir = dirname(Config().get('git'))
    environ['PATH'] = "%s:%s" % (git_dir, environ['PATH'])

    self.repo = LocalRepository(self.rootdir)
def test_default_config(self):
    """Test that the default configuration is created"""
    # (removed a stray leftover debug print statement)
    config = Config(ROOTDIR)
    self.assertTrue(config)
    expected_string = "idk:\n working_repo: %s\n" % config.get("working_repo")
    self.assertEqual(expected_string, self.read_config())
    self.assertTrue(config.get("working_repo"))
    self.assertTrue(config.get("template_dir"))
    self.assertTrue(config.get("couchdb"))
    self.assertTrue(config.get("rabbit-server"))
def launch_data_monitor(filename, launch_options=''):
    """
    @brief launch a terminal tailing a file
    """
    config = Config()
    command = "%s %s -e %s -f %s" % (config.get("xterm"), launch_options,
                                     config.get("tail"), filename)
    # split the command shell-style so Popen gets an argv list
    process = subprocess.Popen(shlex.split(command))
    log.debug("run cmd: %s" % command)
    return process
def launch_data_monitor(filename, launch_options=""):
    """
    @brief launch a terminal tailing a file
    """
    cfg = Config()
    xterm_bin = cfg.get("xterm")
    tail_bin = cfg.get("tail")
    cmd = "%s %s -e %s -f %s" % (xterm_bin, launch_options, tail_bin, filename)
    log.debug("run cmd: %s" % cmd)
    # tokenize with shlex so quoted launch options survive
    return subprocess.Popen(shlex.split(cmd))
def setUp(self):
    """Prepare a fresh egg build environment for each test."""
    IDKPackageNose.setUp(self)
    self._metadata = Metadata('seabird', 'sbe37smb', 'ooicore')
    self._generator = EggGenerator(self._metadata)

    # A stale build directory from a previous run would poison the test;
    # remove it before regenerating.
    build_dir = path.join(self._generator._tmp_dir(),
                          self._generator._build_name())
    if exists(build_dir):
        rmtree(build_dir)

    self._generator._generate_build_dir()
    self._repo_dir = Config().get('working_repo')
    self._tmp_dir = Config().get('tmp_dir')
def __init__(self, driver_make=None, driver_model=None, driver_name=None,
             base_dir=None):
    """
    @brief Constructor
    @param driver_make driver make name
    @param driver_model driver model name
    @param driver_name driver name
    @param base_dir base directory; defaults to Config().base_dir(),
                    resolved lazily at call time.
    @raises InvalidParameters if only some of make/model/name are given
    """
    self.author = None
    self.email = None
    self.driver_make = driver_make
    self.driver_model = driver_model
    self.driver_name = driver_name
    self.notes = None
    self.version = 0
    # Resolve the default here rather than in the signature: a default of
    # Config().base_dir() would be evaluated once at import time and frozen.
    self.base_dir = base_dir if base_dir is not None else Config().base_dir()

    if driver_make and driver_model and driver_name:
        log.debug("Construct from parameters")
        if os.path.isfile(self.metadata_path()):
            self.read_from_file(self.metadata_path())
    elif not (driver_make or driver_model or driver_name):
        # nothing specified at all: fall back to the current metadata file
        self.read_from_file()
    else:
        raise InvalidParameters(
            msg="driver_make, driver_model, driver_name must all be specified")
def test_sbe37_list(self):
    """
    Verify that DriverFileList for the sbe37smb driver resolves exactly the
    expected set of repository files (order-independent comparison).
    """
    metadata = Metadata('seabird', 'sbe37smb', 'ooicore')
    filelist = DriverFileList(metadata, Config().get('working_repo'))
    # Known-good fixture: every file the driver package is expected to pull in.
    known_files = [
        'mi/instrument/seabird/sbe37smb/ooicore/comm_config.yml',
        'mi/instrument/seabird/sbe37smb/ooicore/metadata.yml',
        'mi/__init__.py',
        'mi/core/__init__.py',
        'mi/core/common.py',
        'mi/core/exceptions.py',
        'mi/core/instrument/__init__.py',
        'mi/core/instrument/data_particle.py',
        'mi/core/instrument/instrument_driver.py',
        'mi/core/instrument/instrument_fsm.py',
        'mi/core/instrument/instrument_protocol.py',
        'mi/core/instrument/protocol_param_dict.py',
        'mi/instrument/__init__.py',
        'mi/instrument/seabird/__init__.py',
        'mi/instrument/seabird/sbe37smb/__init__.py',
        'mi/instrument/seabird/sbe37smb/ooicore/__init__.py',
        'mi/instrument/seabird/sbe37smb/ooicore/driver.py',
        'mi/core/instrument/driver_client.py',
        'mi/core/instrument/driver_process.py',
        'mi/core/instrument/zmq_driver_client.py',
        'mi/core/instrument/zmq_driver_process.py',
        'mi/idk/__init__.py',
        'mi/idk/comm_config.py',
        'mi/idk/common.py',
        'mi/idk/config.py',
        'mi/idk/exceptions.py',
        'mi/idk/prompt.py',
        'mi/core/log.py',
        'mi/core/tcp_client.py',
        'mi/core/unit_test.py',
        'mi/idk/util.py',
        'mi/idk/instrument_agent_client.py',
        'mi/core/instrument/port_agent_client.py',
        'mi/core/instrument/logger_client.py',
        'mi/idk/unit_test.py',
        'mi/instrument/seabird/sbe37smb/ooicore/test/__init__.py',
        'mi/instrument/seabird/sbe37smb/ooicore/test/test_driver.py'
    ]
    # show the full diff on mismatch, not a truncated one
    self.maxDiff = None
    files = filelist.files()
    log.debug("FILES = " + str(sorted(files)))
    # sort both sides: only membership matters, not discovery order
    self.assertEqual(sorted(files), sorted(known_files))
def get_drivers():
    """
    @brief Get a list of all drivers and their versions
    @retval nested dict: make -> model -> name -> versions (from
            SwitchDriver.get_versions)
    """
    def subdirs(parent):
        # Yield subdirectory names of parent, skipping the reserved
        # 'test' directory at every level.
        for entry in listdir(parent):
            if entry != 'test' and isdir(join(parent, entry)):
                yield entry

    driver_dir = join(Config().get("working_repo"), 'mi', 'instrument')
    log.debug("Driver Dir: %s", driver_dir)

    drivers = {}
    # walk make/model/name three levels deep under the instrument tree
    for make in subdirs(driver_dir):
        for model in subdirs(join(driver_dir, make)):
            for name in subdirs(join(driver_dir, make, model)):
                log.debug("found driver: %s %s %s", make, model, name)
                drivers.setdefault(make, {}).setdefault(model, {})[name] = \
                    SwitchDriver.get_versions(make, model, name)

    return drivers
def __init__(self, driver_path=None, base_dir=None):
    """
    @brief Constructor
    @param driver_path path to the dataset driver
    @param base_dir base directory; defaults to Config().base_dir(),
                    resolved lazily at call time.
    """
    self.author = None
    self.email = None
    self.driver_path = driver_path
    self.driver_name = None
    self.notes = None
    self.version = 0
    # Resolve the default here rather than in the signature: a default of
    # Config().base_dir() would be evaluated once at import time and frozen.
    self.base_dir = base_dir if base_dir is not None else Config().base_dir()
    self.constructor = None
    self.driver_name_versioned = None
    self.entry_point_group = None
    self.versioned_constructor = None

    log.debug("Constructing platform metadata")

    if driver_path:
        log.debug("Construct from parameters: %s", self.metadata_path())
        if os.path.isfile(self.metadata_path()):
            self.read_from_file(self.metadata_path())
    elif not driver_path:
        self.read_from_file()
    else:
        # fixed wording of the error message ("must all" -> "must")
        raise InvalidParameters(msg="driver_path must be specified")
def parser_test_dir(self):
    """
    @brief full path to parser test code
    @retval parser test path
    """
    base = Config().base_dir()
    return os.path.join(base, "mi", "dataset", "parser", "test")
def test_overloaded_config(self):
    """Test that the overloaded configuration"""
    # Build the default config and add a line
    config = Config(ROOTDIR)
    self.write_config()
    self.assertTrue(config)

    # reload the configuration so the overloaded file takes effect
    config.cm.init(ROOTDIR)

    expected_string = "idk:\n working_repo: %s\n couchdb: %s" % (
        config.get("working_repo"), config.get("couchdb"))
    self.assertEqual(expected_string, self.read_config())
    self.assertEqual(config.get("couchdb"), "couchdb")

    for key in ("working_repo", "template_dir", "rabbit-server"):
        self.assertTrue(config.get(key))
def parser_test_modulename(self):
    """
    @brief module name of the new driver tests
    @retval driver test module name
    """
    # strip the repo base prefix, then convert the path to dotted module form
    relative_path = self.parser_test_path().replace(Config().base_dir() + "/", '')
    return relative_path.replace('/', '.').replace('.py', '')
def driver_base_dir(self):
    """
    @brief full path to the driver make dir
    @retval driver make path
    """
    if self.metadata.driver_path:
        return os.path.join(Config().base_dir(), "mi", "dataset", "driver")
    # no driver_path configured: log what we have and bail out
    log.info("metadata is %s", self.metadata)
    raise DriverParameterUndefined("driver_path undefined in metadata")
def test_one(self):
    """Run the parser over the source file and expect no failures."""
    handler = parse(Config().base_dir(), self.sourceFilePath,
                    ParticleDataHandler())
    log.info("SAMPLES: %s", handler._samples)
    log.info("FAILURE: %s", handler._failure)
    self.assertEquals(handler._failure, False)
def driver_make_dir(self):
    """
    @brief full path to the driver make dir
    @retval driver make path
    """
    make = self.metadata.driver_make
    if not make:
        raise DriverParameterUndefined("driver_make undefined in metadata")
    return os.path.join(Config().base_dir(), "mi", "instrument", make.lower())
def start_container(self, deploy_file=DEFAULT_DEPLOY, container_config=None):
    """
    @brief Launch the instrument agent
    @param deploy_file Deployment file to use to start the container
    @param container_config container parameters that we want to overload
    @raises TestNoDeployFile if deploy_file does not exist on disk
    """
    log.info("Startup the capability container")

    # Optionally bring up backing services first, gated by IDK config flags
    if Config().get("start_couch"):
        self.start_couchdb()
    if Config().get("start_rabbit"):
        self.start_rabbitmq_server()

    # No need to start the container twice
    self.container = Container.instance
    if self.container:
        return

    if not os.path.exists(deploy_file):
        raise TestNoDeployFile(deploy_file)

    # Derive a special test case so we can instantiate a testcase object.
    # then we can run start_container which initialized the capability container
    # There will eventually be a better way to do this I'm sure.
    class _StartContainer(MiIntTestCase):
        def runTest(self):
            pass

    testcase = _StartContainer()

    # Start container.
    log.info("Starting the capability container")
    testcase._start_container()
    container = testcase.container

    # Bring up services in a deploy file (no need to message)
    log.info("Initialize container with %s" % deploy_file)
    container.start_rel_from_url(deploy_file, container_config)
    # only record the container once it is fully initialized
    self.container = container
def test_one(self):
    """Parse the wavss_a/dcl sample file and expect no failures."""
    source_path = os.path.join('mi','dataset','driver','wavss_a', 'dcl','resource','20140825.wavss.log')
    handler = parse(Config().base_dir(), source_path, ParticleDataHandler())
    log.debug("SAMPLES: %s", handler._samples)
    log.debug("FAILURE: %s", handler._failure)
    self.assertEquals(handler._failure, False)
def test_one(self): sourceFilePath = os.path.join('mi', 'dataset', 'driver', 'ctdpf_ckl', 'wfp', 'resource', 'C0000034.dat') particle_data_hdlr_obj = ParticleDataHandler() particle_data_hdlr_obj = parse(Config().base_dir(), sourceFilePath, particle_data_hdlr_obj) for sample in particle_data_hdlr_obj._samples: print sample
def setUp(self):
    """
    Setup the test case
    """
    # start from a clean configuration state
    Config().cm.destroy()

    if not exists(ROOTDIR):
        makedirs(ROOTDIR)

    config_path = self.config_file()
    if exists(config_path):
        log.debug("remove test dir %s" % config_path)
        remove(config_path)
    self.assertFalse(exists(config_path))
def test_one(self):
    """Parse the pco2w_abc sample file and expect no failures."""
    source_path = os.path.join('mi', 'dataset', 'driver', 'pco2w_abc', 'resource', 'SAMI_C0069_300614.txt')
    handler = parse(Config().base_dir(), source_path, ParticleDataHandler())
    log.debug("SAMPLES: %s", handler._samples)
    log.debug("FAILURE: %s", handler._failure)
    self.assertEquals(handler._failure, False)
def stop_container(self):
    """Tear down the capability container and optional backing services."""
    log.info("Stop the container")

    if not self.container:
        log.warn("Container not running.")
        return

    # Derive a throwaway test case so we can reuse its container teardown
    # machinery (mirrors the trick used in start_container).
    class _StartContainer(MiIntTestCase):
        def runTest(self):
            pass

    testcase = _StartContainer()
    testcase.container = self.container
    testcase._stop_container()

    if Config().get("start_couch"):
        self.stop_couchdb()
    if Config().get("start_rabbit"):
        self.stop_rabbitmq_server()

    self.container = None
def test_one(self):
    """Parse the wc_wm/cspp sample file and expect no failures."""
    source_path = os.path.join('mi', 'dataset', 'driver', 'wc_wm', 'cspp', 'resource', '11079364_WC_WM.txt')
    handler = parse(Config().base_dir(), source_path, ParticleDataHandler())
    log.debug("SAMPLES: %s", handler._samples)
    log.debug("FAILURE: %s", handler._failure)
    self.assertEquals(handler._failure, False)
def parse_file(driver_path, file_path, output_dir=None):
    """
    This method will use a DAD driver to invoke a parser and produce the L0
    parser outputs.  The Outputs will be written to a file with the same root
    name as the input file in file_path with the .json suffix

    :param driver_path: full path name to dataset driver
    :param file_path: full path to name of data file.
    :param output_dir full path to directory to write outputs.
    :return:
    """
    # convert driver path to module name:
    # replace \ with / in case this is on windows machine, then / with .
    module = driver_path.replace('\\', '/').replace('/', '.')
    # Strip off the file extension.  NOTE: the previous rstrip('.py') was a
    # bug -- rstrip removes any trailing run of '.', 'p' and 'y' characters,
    # mangling module names that end in those letters.
    if module.endswith('.py'):
        module = module[:-3]

    try:
        driver_module = __import__(module, fromlist=['parse'])
    except ImportError as e:
        log.error("could not import method parse from driver module")
        raise e

    # Split the file path up into it's components to build the output file path
    (dir_path, file_name) = os.path.split(file_path)
    (short_name, extension) = os.path.splitext(file_name)

    # construct the output path
    output_file = short_name + '.json'
    if output_dir:
        output_path = os.path.join(output_dir, output_file)
    else:
        output_path = os.path.join(dir_path, output_file)

    particle_data_hdlr_obj = ParticleDataHandler()
    driver_module.parse(Config().base_dir(), file_path, particle_data_hdlr_obj)

    stream_output = particle_data_hdlr_obj._samples
    errors = particle_data_hdlr_obj._failure
    if errors:
        log.debug('Errors during ingest test')

    # 'with' ensures the output file is closed even if serialization fails
    with open(output_path, 'w') as output_fid:
        output_fid.write(json.dumps(stream_output))
def test_package_driver_real(self): """ Test with real hypm ctd driver code """ # link current metadata dsa file to a real driver, the ctd current_dsa_path = Config().idk_config_dir() + "/current_dsa.yml" ctd_md_path = "%s/%s/hypm/ctd/metadata.yml" % (Config().base_dir(), MI_BASE_DIR) log.info("linking %s to %s", ctd_md_path, current_dsa_path) # exists doesn't catch when this link is broken but still there, # need to figure out how to find and delete if exists(current_dsa_path): os.remove(current_dsa_path) log.error(current_dsa_path) os.symlink(ctd_md_path, current_dsa_path) # create the metadata so we can use it for opening the egg metadata = Metadata() # create the egg with the package driver package_driver = PackageDriver() package_driver.run() startup_config = { 'harvester': { 'directory': '/tmp/dsatest', 'pattern': '*.txt', 'frequency': 1, }, 'parser': {} } # load the driver cotr = self.load_egg(metadata) # need to load with the right number of arguments egg_driver = cotr(startup_config, None, None, None, None) log.info("driver loaded")
def test_one(self):
    """Parse the flort_dj/sio sample file and expect no failures."""
    source_path = os.path.join('mi', 'dataset', 'driver', 'flort_dj', 'sio', 'resource', 'node59p1_0.flort.dat')
    handler = parse(Config().base_dir(), source_path, ParticleDataHandler())
    log.debug("SAMPLES: %s", handler._samples)
    log.debug("FAILURE: %s", handler._failure)
    self.assertEquals(handler._failure, False)
def test_one(self):
    """Parse the adcps_jln/stc sample file and expect no failures."""
    source_path = os.path.join('mi', 'dataset', 'driver', 'adcps_jln', 'stc', 'resource', 'adcpt_20130929_091817.DAT')
    handler = parse(Config().base_dir(), source_path, ParticleDataHandler())
    log.debug("SAMPLES: %s", handler._samples)
    log.debug("FAILURE: %s", handler._failure)
    self.assertEquals(handler._failure, False)
def test_one(self):
    """Parse the dosta_abcdjm/mmp_cds sample file and expect no failures."""
    source_path = os.path.join('mi', 'dataset', 'driver', 'dosta_abcdjm', 'mmp_cds', 'resource', 'large_import.mpk')
    handler = parse(Config().base_dir(), source_path, ParticleDataHandler())
    log.debug("SAMPLES: %s", handler._samples)
    log.debug("FAILURE: %s", handler._failure)
    self.assertEquals(handler._failure, False)
def list_drivers(): """ @brief Print a list of all the drivers and their versions """ driver_dir = join(Config().get("working_repo"), 'mi', 'instrument') log.debug("Driver Dir: %s", driver_dir) drivers = SwitchDriver.get_drivers() for make in sorted(drivers.keys()): for model in sorted(drivers[make].keys()): for name in sorted(drivers[make][model].keys()): for version in sorted(drivers[make][model][name]): print "%s %s %s %s" % (make, model, name, version)
def test_one(self): sourceFilePath = os.path.join('mi','dataset','driver','ctdmo_ghqr','sio', 'resource','CTD15906.DAT') particle_data_hdlr_obj = ParticleDataHandler() particle_data_hdlr_obj = parse(Config().base_dir(), sourceFilePath, particle_data_hdlr_obj) print particle_data_hdlr_obj._samples print particle_data_hdlr_obj._failure log.debug("SAMPLES: %s", particle_data_hdlr_obj._samples) log.debug("FAILURE: %s", particle_data_hdlr_obj._failure) self.assertEquals(particle_data_hdlr_obj._failure, False)
def test_overloaded_config(self):
    """Test that the overloaded configuration"""
    # Build the default config and add a line
    config = Config(ROOTDIR)
    self.write_config()
    self.assertTrue(config)

    # reload the configuration so overloaded values take effect
    config.cm.init(ROOTDIR)

    expected_string = "idk:\n start_couch: false\n working_repo: %s\n start_rabbit: false\n couchdb: %s\n start_couch: True\n start_rabbit: True\n" % (
        config.get("working_repo"), config.get("couchdb"))
    self.assertEqual(expected_string, self.read_config())
    self.assertEqual(config.get("couchdb"), "couchdb")

    for key in ("working_repo", "template_dir", "rabbitmq"):
        self.assertTrue(config.get(key))
    # overloaded file flips both service flags on
    self.assertTrue(True == config.get("start_rabbit"))
    self.assertTrue(True == config.get("start_couch"))
def test_one(self):
    """Parse the ctdbp_p/dcl sample file and expect no failures."""
    source_path = os.path.join('mi', 'dataset', 'driver', 'ctdbp_p', 'dcl', 'resource', 'ctdbp01_20150804_061734.DAT')
    handler = parse(Config().base_dir(), source_path, ParticleDataHandler())
    log.debug("SAMPLES: %s", handler._samples)
    log.debug("FAILURE: %s", handler._failure)
    self.assertEquals(handler._failure, False)
def test_one(self):
    """Parse the flntu_x/mmp_cds sample file and expect no failures."""
    source_path = os.path.join('mi', 'dataset', 'driver', 'flntu_x', 'mmp_cds', 'resource', 'flcdr_1_20131124T005004_459.mpk')
    handler = parse(Config().base_dir(), source_path, ParticleDataHandler())
    log.debug("SAMPLES: %s", handler._samples)
    log.debug("FAILURE: %s", handler._failure)
    self.assertEquals(handler._failure, False)
def test_default_config(self):
    """Test that the default configuration is created"""
    config = Config(ROOTDIR)
    self.assertTrue(config)

    expected_string = "idk:\n start_couch: false\n working_repo: %s\n start_rabbit: false\n" % config.get("working_repo")
    self.assertEqual(expected_string, self.read_config())

    for key in ("working_repo", "template_dir", "couchdb", "rabbitmq"):
        self.assertTrue(config.get(key))
    # both service flags default to off
    self.assertTrue(False == config.get("start_rabbit"))
    self.assertTrue(False == config.get("start_couch"))
def test_default_config(self):
    """Test that the default configuration is created"""
    if os.path.exists(ROOTDIR):
        config = Config(ROOTDIR)
    else:
        # BUG FIX: the original referenced 'config' before it was bound,
        # raising NameError on this branch.  Create a Config first so its
        # config_dir is available, seed it with the test file, then reload.
        config = Config()
        self.copy_config_file(self.config_file(config.cm.config_dir))
        config = Config()
    self.assertTrue(config)
    expected_string = "idk:\n start_couch: false\n working_repo: %s\n start_rabbit: false\n" % config.get("working_repo")
    #self.assertEqual(expected_string, self.read_config(config.cm.config_dir))
    log.debug("Config string: %s", self.read_config(config.cm.config_dir))
    self.assertTrue(config.get("working_repo"))
    self.assertTrue(config.get("template_dir"))
    self.assertTrue(config.get("couchdb"))
    self.assertTrue(config.get("rabbitmq"))
    self.assertTrue(False == config.get("start_rabbit"))
    self.assertTrue(False == config.get("start_couch"))
def test_overloaded_config(self):
    """Test that the overloaded configuration"""
    # Build the default config and add a line
    if os.path.exists(ROOTDIR):
        config = Config(ROOTDIR)
    else:
        # BUG FIX: the original referenced 'config' before it was bound,
        # raising NameError on this branch.  Create a Config first so its
        # config_dir is available, seed it with the test file, then reload.
        config = Config()
        self.copy_config_file(self.config_file(config.cm.config_dir))
        config = Config()
    self.write_config(config.cm.config_dir)
    self.assertTrue(config)

    # reload the configuration
    config.cm.init(config.cm.config_dir)

    expected_string = "idk:\n start_couch: false\n working_repo: %s\n start_rabbit: false\n couchdb: %s\n start_couch: True\n start_rabbit: True\n" % (config.get("working_repo"), config.get("couchdb"))
    #self.assertEqual(expected_string, self.read_config(config.cm.config_dir))
    self.assertEqual(config.get("couchdb"), "couchdb")
    self.assertTrue(config.get("working_repo"))
    self.assertTrue(config.get("template_dir"))
    self.assertTrue(config.get("rabbitmq"))
    self.assertTrue(True == config.get("start_rabbit"))
    self.assertTrue(True == config.get("start_couch"))