def testNonzeroSimReturnCode(self):
  """Checks that a sim failure in one IEC case is reported per-case.

  The fake controller/estimator record which IEC load case is being run,
  and the fake simulator fails (returns -1) only for the extreme coherent
  gust case.  The per-case output files must then report sim_successful
  as [True, True, False] for cases ['1.1', '1.3', '1.4b'].
  """

  class FakeBinaries(testing_fakes.FakeBinaries):

    _NUM_SAMPLES = 100

    def _RecordIecCase(self, args):
      # Remember which IEC load case this worker was configured with, so
      # Simulator() can decide whether to fail.
      with open(self._ParseArg(args, '--all_params'), 'r') as config_file:
        config = json.load(config_file)
        self._iec_case = config['sim']['iec_sim']['load_case']

    def Controller(self, args):
      self._RecordIecCase(args)
      # Simulate being terminated with SIGINT.
      return -2

    def GroundEstimator(self, args):
      self._RecordIecCase(args)
      # Simulate being terminated with SIGINT.
      return -2

    def Simulator(self, args):
      if (self._iec_case
          == sim_types.kIecCaseExtremeCoherentGustWithDirectionChange):
        return -1
      else:
        return 0

    def PcapToHdf5(self, args):
      log_file_name = self._ParseArg(args, '--output_file')
      MakeFakeLogFile(log_file_name, self._NUM_SAMPLES)
      return 0

  patch = testing_fakes.PatchAllFakes(
      binary_map=self.BINARY_MAP,
      binaries=FakeBinaries(),
      worker_factory=batch_sim_worker.IecCasesSimWorker,
      client_class=batch_sim_client.IecCasesSimClient)

  with os_util.TempDir() as temp_dir:
    with patch:
      gflags.FLAGS.cases = ['1.1', '1.3', '1.4b']
      gflags.FLAGS.output_dir = temp_dir
      gflags.FLAGS.local_output_dir = temp_dir
      client = batch_sim_client.IecCasesSimClient(
          num_workers=self.NUM_WORKERS, sim_name=self.SIM_NAME)
      client.Run()

      output_files = [os.path.join(temp_dir, '%d.json' % case_index)
                      for case_index in range(3)]
      for output_file in output_files:
        self.assertTrue(os.path.isfile(output_file))

      sim_successes = []
      for output_file in output_files:
        with open(output_file, 'r') as f:
          sim_successes.append(json.load(f)['sim_successful'])
      self.assertEqual([True, True, False], sim_successes)
def testReduceOnly(self):
  """Checks that --reduce_only re-reduces existing worker outputs.

  First runs a full batch sim to populate the local output directory,
  then runs a second, reduce-only client whose fakes are all None so it
  would crash if it attempted anything other than the reduce step.
  """
  with os_util.TempDir() as temp_dir:
    FLAGS.local_output_dir = temp_dir
    # Run a normal batch sim to generate local worker outputs.
    with testing_fakes.PatchAllFakes(binary_map=self.BINARY_MAP,
                                     binaries=FakeBinaries(),
                                     worker_factory=GoodWorker,
                                     client_class=TestSimClient):
      client = TestSimClient(sim_name=self.SIM_NAME,
                             num_workers=self.NUM_WORKERS)
      client.Run()
      self.assertAlmostEqual(10 * _SIM_TIME, client.total_sim_time)

    # Run a reduce-only client with patches that will make it crash horribly
    # if it does anything other than reducing.
    FLAGS.reduce_only = True
    with testing_fakes.PatchAllFakes(binary_map=None,
                                     binaries=None,
                                     worker_factory=None,
                                     client_class=TestSimClient):
      reducer_client = TestSimClient(sim_name=self.SIM_NAME,
                                     num_workers=self.NUM_WORKERS)
      reducer_client.Run()
      # Bug fix: previously asserted on `client` again, leaving the
      # reduce-only client's result unchecked.
      self.assertAlmostEqual(10 * _SIM_TIME, reducer_client.total_sim_time)
def testDeleteDirOnError(self):
  """Checks that TempDir removes its directory when the body raises."""
  with self.assertRaises(ValueError), os_util.TempDir() as temp_dir:
    dir_exists_before_exception = os.path.isdir(temp_dir)
    raise ValueError
  # The directory existed while inside the context manager...
  self.assertTrue(dir_exists_before_exception)
  # ...and is cleaned up even though we exited via an exception.
  self.assertFalse(os.path.exists(temp_dir))
def testClientAndWorker(self):
  """Runs a faked power-curve batch sim and checks its outputs.

  Verifies that the HDF5 power-curve data matches the fake simulator's
  1000 * w^3 power model, and that all expected plots and the HTML
  report are written to the output directory.
  """
  FLAGS.wind_speeds = numpy.linspace(3.0, 15.0, 10)
  patch = testing_fakes.PatchAllFakes(
      binary_map=self.BINARY_MAP,
      binaries=FakeBinaries(),
      worker_factory=batch_sim_worker.PowerCurveSimWorker,
      client_class=batch_sim_client.PowerCurveSimClient)
  with os_util.TempDir() as temp_dir:
    with patch, test_util.DisableWarnings():
      gflags.FLAGS.output_dir = temp_dir
      client = batch_sim_client.PowerCurveSimClient(
          num_workers=self.NUM_WORKERS, sim_name=self.SIM_NAME)
      client.Run()

      # Check that a data file containing the power curve was created.
      # Fix: open read-only and close the file via a context manager
      # (the handle was previously leaked, and h5py >= 3 no longer
      # defaults to an appropriate mode).
      with h5py.File(os.path.join(temp_dir, 'data.h5'), 'r') as data_file:
        wind_speeds = data_file['wind_speed'][:]
        powers = data_file['crosswind_power'][:]
        sim_successes = data_file['sim_success'][:]
      for w, p, s in zip(wind_speeds, powers, sim_successes):
        self.assertAlmostEqual(1000.0 * w**3, p, delta=1e-8)
        self.assertEqual(s, w < 4.0 or w > 6.0)

      # Check that all the plots and HTML files were created.
      expected_outputs = [
          'mean_flap_deflections.svg',
          'standard_deviation_flap_deflections.svg',
          'faults_vs_wind_speed.png',
          'power_curve_by_throttle.svg',
          'power_and_tension_curve.svg',
          'angles_vs_wind_speed.svg',
          'angle_errors_vs_wind_speed.svg',
          'radius_vs_wind_speed.svg',
          'curvature_errors_vs_wind_speed.svg',
          'index.html',
      ]
      for output_name in expected_outputs:
        self.assertTrue(
            os.path.isfile(os.path.join(temp_dir, output_name)))
def testDeleteTempDirsFlag(self):
  """Checks that --nodelete_temp_dirs preserves the directory and logs it."""
  FLAGS.delete_temp_dirs = False
  with mock.patch(os_util.__name__ + '.logging.info') as mock_log:
    with os_util.TempDir('foo') as temp_dir:
      self.assertTrue(os.path.isdir(temp_dir))
    # With deletion disabled, the directory survives the context exit.
    self.assertTrue(os.path.isdir(temp_dir))
    # Bug fix: `assertTrue(mock_log.assert_any_call)` only tested the
    # truthiness of a bound method (always true).  Check that the
    # preserved directory was actually logged.
    self.assertTrue(mock_log.called)
  # Clean up manually since TempDir intentionally did not.
  shutil.rmtree(temp_dir)
def _SetupAndRunWorker(self, instance_name):
  """Sets up and runs a worker.

  This method fakes a lot of the behavior that is ordinarily performed
  by the worker's startup script.  It would be nice to find a good way
  to keep the two in sync.

  Args:
    instance_name: Name of the worker instance.
  """
  # Convert metadata from {'key': ..., 'value': ...} pairs to a dict.
  metadata = {}
  for item in self.metadata_by_instance_name[instance_name]['items']:
    metadata[item['key']] = item['value']

  with mock.patch(gcloud_fakes.__name__ + '.FakeFilesystem.LOCAL',
                  gcloud_fakes.FakeFilesystem()) as worker_fs:
    worker_fs.Save(os.path.join(makani.HOME, 'lib/scripts/sim_tcpdump.sh'),
                   _MakeFakeExecutable('TcpDump'))
    gstorage = gcloud_fakes.FakeCloudStorageApi()

    # Download and unpack packages.
    for package_path in (metadata['worker_package_path'],
                         metadata['config_package_path']):
      relative_path = package_path.replace('gs://makani/', '')
      stream = io.BytesIO()
      gstorage.DownloadFile(relative_path, stream)
      package = json.loads(stream.getvalue())
      assert 'files' in package
      for filename, contents in package['files'].items():
        worker_fs.Save(filename, contents)

    with test_util.FlagValueSaver(), os_util.TempDir() as config_dir:
      FLAGS.Reset()
      FLAGS(metadata['exec_cmd'].split())
      # We cheat a little bit with the config directory.  FLAGS.config_dir
      # refers to worker_fs, whereas we want the worker itself to load files
      # from a real directory.
      first, last = [int(i) for i in FLAGS.config_range]
      for i in range(first, last + 1):
        content = worker_fs.Load('%s/%d.json' % (FLAGS.config_dir, i))
        # Fix: close the file deterministically instead of leaking the
        # handle via open(...).write(...).
        with open(os.path.join(config_dir, '%d.json' % i), 'w') as config_file:
          config_file.write(content)
      FLAGS.config_dir = config_dir

      worker = self._worker_factory()
      try:
        worker.Run()
      except _FakeError as e:
        # On failure, upload the error text the way a real worker would.
        # NOTE: io.BytesIO(str(e)) relies on Python 2 str-is-bytes.
        error_dir = metadata['error_log_dir'].replace('gs://makani/', '')
        stream = io.BytesIO(str(e))
        gstorage.UploadStream(stream,
                              '%s/%s.LOG' % (error_dir, instance_name))
def testDeleteTempDirsOnErrorFlag(self):
  """Checks --nodelete_temp_dirs_on_error keeps the dir after an exception."""
  FLAGS.delete_temp_dirs_on_error = False
  with mock.patch(os_util.__name__ + '.logging.warning') as mock_log:
    with self.assertRaises(ValueError):
      with os_util.TempDir('foo') as temp_dir:
        self.assertTrue(os.path.isdir(temp_dir))
        raise ValueError
    # The directory must survive the error exit when deletion-on-error
    # is disabled.
    self.assertTrue(os.path.isdir(temp_dir))
    # Bug fix: `assertTrue(mock_log.assert_any_call)` only tested the
    # truthiness of a bound method (always true).  Check that a warning
    # was actually logged.
    self.assertTrue(mock_log.called)
  # Clean up manually since TempDir intentionally did not.
  shutil.rmtree(temp_dir)
def testClientAndWorker(self):
  """Runs a faked IEC-cases batch sim and checks the generated report.

  All fake binaries succeed; the test only verifies that one plot per
  IEC case plus an index.html are produced in the output directory.
  """

  class FakeBinaries(testing_fakes.FakeBinaries):

    _NUM_SAMPLES = 100

    def Controller(self, args):
      return 0

    def GroundEstimator(self, args):
      return 0

    def Simulator(self, args):
      return 0

    def PcapToHdf5(self, args):
      log_file_name = self._ParseArg(args, '--output_file')
      MakeFakeLogFile(log_file_name, self._NUM_SAMPLES)
      return 0

  patch = testing_fakes.PatchAllFakes(
      binary_map=self.BINARY_MAP,
      binaries=FakeBinaries(),
      worker_factory=batch_sim_worker.IecCasesSimWorker,
      client_class=batch_sim_client.IecCasesSimClient)

  with os_util.TempDir() as temp_dir:
    with patch:
      gflags.FLAGS.output_dir = temp_dir
      client = batch_sim_client.IecCasesSimClient(
          num_workers=self.NUM_WORKERS, sim_name=self.SIM_NAME)
      client.Run()

      # Check that all the plots and HTML files were created: one PNG
      # per IEC case, plus the report page.
      png_files = [name for name in os.listdir(temp_dir)
                   if re.match(r'.+\.png', name)]
      self.assertEqual(len(batch_sim_client.IEC_CASES), len(png_files))
      self.assertTrue(os.path.isfile(os.path.join(temp_dir, 'index.html')))
def testClientAndWorker(self):
  """Runs a faked hover-disturbances batch sim and checks report output."""
  FLAGS.wind_speeds = numpy.linspace(3.0, 15.0, 3)
  FLAGS.num_amplitudes = 2
  FLAGS.show_events = False
  patch = testing_fakes.PatchAllFakes(
      binary_map=self.BINARY_MAP,
      binaries=FakeBinaries(),
      worker_factory=batch_sim_worker.HoverDisturbancesSimWorker,
      client_class=batch_sim_client.HoverDisturbancesSimClient)
  with os_util.TempDir() as temp_dir:
    with patch:
      gflags.FLAGS.output_dir = temp_dir
      client = batch_sim_client.HoverDisturbancesSimClient(
          num_workers=self.NUM_WORKERS, sim_name=self.SIM_NAME)
      client.Run()

      # The client must emit the stylesheet and the report page.
      for report_file in ('style.css', 'index.html'):
        self.assertTrue(
            os.path.isfile(os.path.join(temp_dir, report_file)))
def Run(self):
  """Runs the client from start to finish."""

  def OutputCmpKey(filename):
    """For filename X.json, uses int(X) as a sorting key."""
    basename = os.path.basename(filename)
    parts = basename.split('.')
    assert len(parts) == 2 and parts[1] == 'json'
    return int(basename.split('.')[0])

  # Maintenance mode: just delete stale workers and exit.
  if FLAGS.delete_old_workers:
    self.DeleteOldWorkers()
    return

  # Reduce-only mode: skip simulation entirely and re-reduce the worker
  # outputs already present in the local output directory.
  if FLAGS.reduce_only:
    assert self._local_output_dir, 'No local_output_dir was specified.'
    assert os.path.exists(self._local_output_dir), (
        'local_output_dir (%s) does not exist.' % self._local_output_dir)
    outputs = set([f for f in os.listdir(self._local_output_dir)
                   if re.match(r'\d+\.json', f)])
    if outputs:
      # Outputs are named 0.json..N-1.json; if the highest-indexed file
      # is present we assume the set is contiguous.
      assert '%d.json' % (len(outputs) - 1) in outputs, (
          'Some output files are missing.')
    # Sort numerically (not lexically) so 10.json follows 9.json.
    output_paths = sorted(
        [os.path.join(self._local_output_dir, f) for f in outputs],
        key=OutputCmpKey)
    self._ReduceWorkerOutput(output_paths)
    return

  # Normal full run: package configs and the worker, run the workers
  # (locally or remotely), then reduce their outputs.
  with os_util.TempDir(self._sim_name) as working_dir:
    if not self._local_output_dir:
      self._local_output_dir = os.path.join(working_dir, 'worker_output')
    if not os.path.exists(self._local_output_dir):
      os.makedirs(self._local_output_dir)

    worker_package_path = os.path.join(
        makani.HOME, self._BASE_PATH + '_worker_package.par')
    if FLAGS.use_local_worker:
      # Local workers read the package from the working directory.
      shutil.copy(worker_package_path,
                  os.path.join(working_dir, 'worker.par'))
    elif FLAGS.upload_worker_package:
      self._UploadFile(worker_package_path, 'worker.par')

    logging.info('Making config package.')
    config_package_path = os.path.join(working_dir, 'configs.tar.gz')
    num_configs = self._MakeConfigPackage(config_package_path)
    if not FLAGS.use_local_worker:
      self._UploadFile(config_package_path, 'configs.tar.gz')

    if FLAGS.use_local_worker:
      self._RunLocalWorker(num_configs, working_dir)
      # No remote instances to tear down in local mode.
      worker_names = []
    else:
      worker_names = self._RunRemoteWorkers(num_configs)

    # Only reduce if every worker finished cleanly; each config must have
    # produced its numbered output file.
    if not self._worker_error:
      expected_outputs = ['%d.json' % i for i in xrange(num_configs)]
      actual_outputs = set([f for f in os.listdir(self._local_output_dir)
                            if re.match(r'\d+\.json', f)])
      for e in expected_outputs:
        assert e in actual_outputs, (
            'Expected file %s is not in the output directory: %s.'
            % (e, self._local_output_dir))
      output_paths = [os.path.join(self._local_output_dir, f)
                      for f in expected_outputs]
      self._ReduceWorkerOutput(output_paths)

    # Tear down remote workers even if reduction was skipped due to error.
    self._DeleteWorkers(worker_names)
num_y=FLAGS.num_y, time_step=FLAGS.time_step, duration=FLAGS.duration, hub_height=hub_height, width=FLAGS.width, height=FLAGS.height, v_flow_ang=FLAGS.v_flow_ang, wind_shear_exp=FLAGS.wind_shear_exp, turb_model=FLAGS.turb_model, standard=FLAGS.iec_standard, turbc=FLAGS.iec_turbc, wind_type=FLAGS.iec_wind_type, mean_wind_speed=FLAGS.mean_wind_speed) print 'Writing TurbSim input...' with os_util.TempDir() as turbsim_dir: with os_util.ChangeDir(turbsim_dir): input_file = os.path.join(turbsim_dir, 'TurbSim.inp') with open(input_file, 'w') as f: f.write(turbsim_input) print 'Running TurbSim...' try: subprocess.check_call([ 'wine', '/opt/makani/third_party/TurbSim/TurbSim64.exe', 'TurbSim.inp' ]) except: raise Exception('Could not find 64bit version of TurbSim. ' 'To update, run a new build or try:\n' ' cd /opt/makani/third_party && git fetch && '
def testDeleteDirOnExit(self):
  """Checks that TempDir removes its directory on a normal exit."""
  with os_util.TempDir() as created_dir:
    # The directory exists while the context manager is active...
    self.assertTrue(os.path.isdir(created_dir))
  # ...and is gone once the context manager exits normally.
  self.assertFalse(os.path.exists(created_dir))