def load_cities(request):
    """Load the cities data before running the tests.

    Skipped when the ``skip_cities`` or ``check_ref`` pytest options are
    set.  With ``USE_ARTEMIS_NG`` the cities file is POSTed to the Tyr
    API; otherwise the ``cities`` binary is executed directly.
    """
    log = logging.getLogger(__name__)
    if request.config.getvalue("skip_cities") or request.config.getvalue(
            "check_ref"):
        log.info("skipping cities loading")
        return
    if config.get('USE_ARTEMIS_NG'):
        log.warning('Posting cities')
        url = config['URL_TYR'] + "/v0/cities/"
        # context manager so the input file handle is always released
        # (the original leaked it)
        with open(config['CITIES_INPUT_FILE'], 'rb') as f:
            r = requests.post(url, files={'file': f})
        r.raise_for_status()
        return
    log.info("loading cities database")
    utils.launch_exec('cities -i {input} --connection-string'.format(
        input=config['CITIES_INPUT_FILE']),
        additional_args=[config['CITIES_DB']])
    log.info("cities database loaded")
def pop_jormungandr(cls):
    """Start the front end; jormungandr is served through apache."""
    log = logging.getLogger(__name__)
    log.debug("running jormungandr")
    utils.launch_exec("sudo service apache2 status")
    ret, _ = utils.launch_exec("sudo service apache2 start")
    utils.launch_exec("sudo service apache2 status")
    assert ret == 0, "cannot start apache"

    # probe every region up front so a broken kraken fails fast with a
    # readable message instead of surfacing later in a random test
    for data_set in cls.data_sets:
        retryer = Retrying(
            stop_max_delay=data_set.reload_timeout.total_seconds() * 1000,
            wait_fixed=data_set.fixed_wait.total_seconds() * 1000,
            retry_on_result=lambda status: status != "running",
        )
        try:
            retryer.call(kraken_status, data_set)
        except RetryError as e:
            assert False, "region {r} KO, status={s}".format(
                r=data_set.name, s=e.last_attempt.value)
def kill_jormungandr(cls):
    """Stop the apache service that fronts jormungandr."""
    log = logging.getLogger(__name__)
    log.debug("killing jormungandr")
    utils.launch_exec("sudo service apache2 status")
    return_code, _ = utils.launch_exec("sudo service apache2 stop")
    utils.launch_exec("sudo service apache2 status")
    assert return_code == 0, "cannot stop apache"
def kill_jormungandr(cls):
    """Shut down apache, which hosts the jormungandr front end."""
    log = logging.getLogger(__name__)
    log.debug("killing jormungandr")
    status_cmd = 'sudo service apache2 status'
    utils.launch_exec(status_cmd)
    ret, _ = utils.launch_exec('sudo service apache2 stop')
    utils.launch_exec(status_cmd)
    assert ret == 0, "cannot stop apache"
def load_cities(request):
    """Load the cities data before running the tests.

    In artemis-ng mode the cities file is POSTed to Tyr and the
    ``/v0/cities/status`` endpoint is polled until the job completes;
    otherwise the ``cities`` binary is executed directly.
    """
    def is_retry_exception(exception):
        # only RetryError triggers another polling round; any other
        # exception aborts immediately
        return isinstance(exception, RetryError)

    def get_last_cities_job():
        # ask Tyr for the most recent 'cities' job
        r_cities = requests.get(config["URL_TYR"] + "/v0/cities/status")
        r_cities.raise_for_status()
        return json.loads(r_cities.text)["latest_job"]

    @retry(stop_max_delay=300000,
           wait_fixed=500,
           retry_on_exception=is_retry_exception)
    def wait_for_cities_completion():
        """Wait until the 'cities' task is completed.

        The task is considered failed if any error occurs while
        requesting Tyr.
        """
        last_cities_job = get_last_cities_job()
        if last_cities_job and "state" in last_cities_job:
            if last_cities_job["state"] == "running":
                # signal @retry to poll again
                raise RetryError("Cities task still running...")
            elif last_cities_job["state"] == "failed":
                # fixed message: the original raised the truncated
                # "Job 'cities' status" on failure
                raise Exception("Job 'cities' failed")
        else:
            raise Exception("Couldn't get 'cities' job status")

    log = logging.getLogger(__name__)
    if request.config.getvalue("skip_cities") or request.config.getvalue(
            "check_ref"):
        log.info("skipping cities loading")
        return
    if config.get("USE_ARTEMIS_NG"):
        log.info("Posting cities")
        url = config["URL_TYR"] + "/v0/cities/"
        # context manager so the input file handle is always released
        # (the original leaked it)
        with open(config["CITIES_INPUT_FILE"], "rb") as f:
            r = requests.post(url, files={"file": f})
        r.raise_for_status()
        wait_for_cities_completion()
        log.info("Cities task finished")
        return
    log.info("loading cities database")
    utils.launch_exec(
        "cities -i {input} --connection-string".format(
            input=config["CITIES_INPUT_FILE"]),
        additional_args=[config["CITIES_DB"]],
    )
    log.info("cities database loaded")
def read_data_by_dataset(cls, data_set):
    """Feed tyr (via sudo) with every sub-directory of the data set path."""
    log = logging.getLogger(__name__)
    log.debug("reading data for {}".format(data_set.name))
    data_path = utils.instance_data_path(data_set.name)
    # collect every sub-directory of data_path; each one holds data to load
    data_dirs = []
    for sub_dir_name in os.listdir(data_path):
        candidate = os.path.join(data_path, sub_dir_name)
        if os.path.isdir(candidate):
            data_dirs.append(candidate)
    log.debug("loading {}".format(data_dirs))
    utils.launch_exec(
        "sudo {tyr} load_data {data_set} {data_set_dir}".format(
            tyr=_tyr,
            data_set=data_set.name,
            data_set_dir=','.join(data_dirs)),
        additional_env={'TYR_CONFIG_FILE': _tyr_config_file})
def load_cities(request):
    """Load the cities database before running the tests.

    Skipped when the ``skip_cities`` pytest option is set.
    """
    log = logging.getLogger(__name__)
    if request.config.getvalue("skip_cities"):
        # message typo fixed ("skiping" -> "skipping")
        log.info("skipping cities loading")
        return
    log.info("loading cities database")
    utils.launch_exec('cities -i {input} --connection-string'.format(
        input=config['CITIES_INPUT_FILE']),
        additional_args=[config['CITIES_DB']])
    log.info("cities database loaded")
def load_cities(request):
    """Load the cities database before running the tests.

    Skipped when the ``skip_cities`` pytest option is set.
    """
    log = logging.getLogger(__name__)
    if request.config.getvalue("skip_cities"):
        # message typo fixed ("skiping" -> "skipping")
        log.info("skipping cities loading")
        return
    log.info("loading cities database")
    utils.launch_exec('cities -i {input} --connection-string'
                      .format(input=config['CITIES_INPUT_FILE']),
                      additional_args=[config['CITIES_DB']])
    log.info("cities database loaded")
def read_data_by_dataset(cls, data_set):
    """Feed tyr with every sub-directory found under the data set path."""
    log = logging.getLogger(__name__)
    log.debug("reading data for {}".format(data_set.name))
    data_path = utils.instance_data_path(data_set.name)
    # keep only the entries of data_path that are directories
    candidates = (os.path.join(data_path, name)
                  for name in os.listdir(data_path))
    data_dirs = [path for path in candidates if os.path.isdir(path)]
    log.debug("loading {}".format(data_dirs))
    utils.launch_exec(
        "{tyr} load_data {data_set} {data_set_dir}".format(
            tyr=_tyr,
            data_set=data_set.name,
            data_set_dir=','.join(data_dirs)),
        additional_env={'TYR_CONFIG_FILE': _tyr_config_file})
def kill_the_krakens(cls):
    """Stop the kraken service of every data set (no-op in check_ref mode)."""
    if cls.check_ref:
        return
    log = logging.getLogger(__name__)
    for data_set in cls.data_sets:
        log.debug("killing the kraken {}".format(data_set.name))
        stop_cmd = 'sudo {service} {kraken} stop'.format(
            service=_kraken_wrapper, kraken=data_set.name)
        return_code, _ = utils.launch_exec(stop_cmd)
        assert return_code == 0, "command failed"
def kill_the_krakens(cls):
    """Shut down one kraken per data set; skipped when check_ref is set."""
    if cls.check_ref:
        return
    log = logging.getLogger(__name__)
    for data_set in cls.data_sets:
        log.debug("killing the kraken {}".format(data_set.name))
        command = "sudo {service} {kraken} stop".format(
            service=_kraken_wrapper, kraken=data_set.name)
        ret, _ = utils.launch_exec(command)
        assert ret == 0, "command failed"
def pop_jormungandr(cls):
    """Bring up the front end; jormungandr runs behind apache."""
    log = logging.getLogger(__name__)
    log.debug("running jormungandr")
    utils.launch_exec('sudo service apache2 status')
    start_code, _ = utils.launch_exec('sudo service apache2 start')
    utils.launch_exec('sudo service apache2 status')
    assert start_code == 0, "cannot start apache"

    # check every region right away so startup failures surface early
    for data_set in cls.data_sets:
        try:
            Retrying(
                stop_max_delay=data_set.reload_timeout.total_seconds() * 1000,
                wait_fixed=data_set.fixed_wait.total_seconds() * 1000,
                retry_on_result=lambda result: result != 'running',
            ).call(kraken_status, data_set)
        except RetryError as e:
            assert False, "region {r} KO, status={s}".format(
                r=data_set.name, s=e.last_attempt.value)
def load_cities(request):
    """Load the cities data before running the tests.

    Skipped when the ``skip_cities`` or ``check_ref`` pytest options are
    set.  With ``USE_DOCKER`` the cities file is POSTed to the local Tyr
    instance; otherwise the ``cities`` binary is executed directly.
    """
    log = logging.getLogger(__name__)
    if request.config.getvalue("skip_cities") or request.config.getvalue(
            "check_ref"):
        log.info("skipping cities loading")
        return
    if config['USE_DOCKER']:
        url = "http://localhost:9898/v0/cities/"
        # context manager so the input file is always closed (it was
        # leaked before), and raise_for_status so an HTTP failure is no
        # longer silently ignored
        with open(config['CITIES_INPUT_FILE'], 'rb') as f:
            response = requests.post(url, files={'file': f})
        response.raise_for_status()
        return
    log.info("loading cities database")
    utils.launch_exec('cities -i {input} --connection-string'.format(
        input=config['CITIES_INPUT_FILE']),
        additional_args=[config['CITIES_DB']])
    log.info("cities database loaded")
def pop_krakens(cls):
    """Start the kraken service for each data set (no-op in check_ref mode)."""
    if cls.check_ref:
        return
    log = logging.getLogger(__name__)
    for data_set in cls.data_sets:
        log.debug("launching the kraken {}".format(data_set.name))
        start_cmd = 'sudo {service} {kraken} start'.format(
            service=_kraken_wrapper, kraken=data_set.name)
        return_code, _ = utils.launch_exec(start_cmd)
        assert return_code == 0, "command failed"