def abort(msg):
    """Abort a run with an error message.

    We assume logging has been set up.
    """

    log.critical('ABORT: %s' % msg)

    # try to save the log file to S3 first
    try:
        s3 = s3_connect()
        bucket = s3.create_bucket(S3Bucket)
        key_str = ('%s/%s-%s-%s-%s.log'
                   % (S3AbortDir, User, Project, Scenario, Setup))
        key = bucket.new_key(key_str)
        key.set_contents_from_filename(LogFile)
        key.set_acl('public-read')
        send_sqs_message(status=StatusAbort, generated_datafile=key_str,
                         message=msg)
    except Exception:
        # if we get here, we can't save the log file
        send_sqs_message(status=StatusAbort, message=msg)

    # then stop the AMI
    shutdown()
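# s3_connect() is used above but not defined in this section.  A minimal
# sketch, assuming boto 2.x with credentials picked up from the standard
# boto configuration; everything here except the function name (taken
# from the calls above) is illustrative.
def s3_connect():
    """Return an S3 connection (sketch, not the original implementation)."""
    import boto

    # boto reads AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY or ~/.boto
    return boto.connect_s3()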
def excepthook(type, value, tb):
    """Exception hook routine."""

    msg = '\n' + '='*80 + '\n'
    msg += 'Uncaught exception:\n'
    msg += ''.join(traceback.format_exception(type, value, tb))
    msg += '='*80 + '\n'
    log.critical(msg)
def abort(msg):
    """Abort a run with an error message.

    We assume logging has been set up.
    """

    log.critical('ABORT: %s' % msg)
    send_message(status=StatusAbort, message=msg)

    # then stop the AMI
    shutdown()
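# shutdown() is called by both abort() variants but not defined here.  A
# minimal sketch, assuming it uses the same 'sudo halt' idiom as the
# second error() variant below; the original may also save state first.
def shutdown():
    """Stop the current AMI (sketch, not the original implementation)."""
    import os

    log.info('shutdown: halting instance')
    os.system('sudo halt')    # assumes passwordless sudo on the image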
def error(msg):
    """Send an ERROR message.

    We assume logging has NOT been set up.
    Stop the process right here.  Can't use shutdown().
    """

    log.critical('ERROR: %s' % msg)
    send_message_lite(status=StatusERROR, message=msg)

    # then stop the AMI
    terminate_instance()
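# terminate_instance() is likewise undefined in this section.  A hedged
# sketch, assuming boto 2.x and that the code runs on the instance being
# terminated, so the instance id comes from the standard EC2 metadata
# endpoint; the rest is illustrative.
def terminate_instance():
    """Terminate the instance we are running on (sketch)."""
    import urllib2
    import boto

    instance_id = urllib2.urlopen(
        'http://169.254.169.254/latest/meta-data/instance-id').read()
    ec2 = boto.connect_ec2()
    ec2.terminate_instances([instance_id])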
def error(msg):
    """Send an ERROR SQS message.

    We assume logging has NOT been set up.
    Stop the process right here.  Can't use shutdown().
    """

    log.critical('ERROR: %s' % msg)
    send_sqs_message_lite(status=StatusERROR, message=msg)

    # then stop the AMI
    os.system('sudo halt')
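# The send_sqs_message*() helpers wrap boto SQS but are not shown here.
# A minimal sketch of send_sqs_message(), assuming boto 2.x; SQSRegion
# and SQSQueueName are assumed globals, and folding all keyword arguments
# into a JSON body is a guess based on how the helper is called above.
# The '_lite' variant presumably differs only in what setup it relies on.
def send_sqs_message(**kwargs):
    """Send a JSON status message to the run's SQS queue (sketch)."""
    import json
    import boto.sqs
    from boto.sqs.message import Message

    sqs = boto.sqs.connect_to_region(SQSRegion)    # assumed global
    queue = sqs.create_queue(SQSQueueName)         # assumed global
    msg = Message()
    msg.set_body(json.dumps(kwargs))
    queue.write(msg)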
def run_tsudat(json_data):
    """Run ANUGA on Amazon EC2.

    json_data  the path to the JSON data file

    Returns the boto instance object for the running image.
    """

    # plug our exception handler into the python system
    sys.excepthook = excepthook

    # get JSON data and adorn project object with its data
    adorn_project(json_data)

    # default certain values if not supplied in JSON data
    default_project_values()

    # set logfile to be in run output folder
    if project.debug:
        log.log_logging_level = log.DEBUG
    log.log_filename = os.path.join(project.output_folder, 'ui.log')
    if project.debug:
        dump_project_py()

    # do all required data generation before EC2 run
    log.info('#'*90)
    log.info('# Launching simulation')
    log.info('#'*90)

    # copy all required python modules to scripts directory
    ec2_name = os.path.join(ScriptsDir, Ec2RunTsuDATOnEC2)
    log.debug("Copying EC2 run file '%s' to scripts directory '%s'."
              % (Ec2RunTsuDAT, ec2_name))
    shutil.copy(Ec2RunTsuDAT, ec2_name)

    for extra in RequiredFiles:
        log.info('Copying %s to S3 scripts directory' % extra)
        shutil.copy(extra, ScriptsDir)

    # dump the current 'project' object back into JSON, put in 'scripts'
    json_file = os.path.join(ScriptsDir, JsonDataFilename)
    log.info('Dumping JSON to file %s' % json_file)
    dump_json_to_file(project, json_file)
    dump_project_py()

    # bundle up the working directory, put it into S3
    zipname = ('%s-%s-%s-%s.zip'
               % (project.user, project.project,
                  project.scenario, project.setup))
    zip_tmp_dir = tempfile.mkdtemp(prefix='tsudat2_zip_')
    zippath = os.path.join(zip_tmp_dir, zipname)
    log.info('Making zip %s from %s' % (zippath, project.working_directory))
    make_dir_zip(project.working_directory, zippath)
    os.system('ls -l %s' % zip_tmp_dir)

    s3_name = os.path.join(project.InputS3DataDir, zipname)
    try:
        s3 = s3_connect()
        bucket = s3.create_bucket(project.S3Bucket)
        key = bucket.new_key(s3_name)
        log.info('Creating S3 file: %s/%s' % (project.S3Bucket, s3_name))
        key.set_contents_from_filename(zippath)
        log.info('Done!')
        key.set_acl('public-read')
    except boto.exception.S3ResponseError as e:
        log.critical('S3 error: %s' % str(e))
        print('S3 error: %s' % str(e))
        sys.exit(10)
                 'getsww': getsww,              # DELETE ME
                 'Debug': 'debug' if project.debug else 'production'}
    user_data = json.dumps(user_data, ensure_ascii=True, separators=(',', ':'))

    # save user data into <work_dir>/restart
    restart_file = os.path.join(project.working_directory, '_restart_',
                                '%s_%s_%s_%s.restart'
                                % (project.user, project.project,
                                   project.scenario, project.setup))
    with open(restart_file, 'wb') as fp:
        fp.write(user_data)

    # actually start the instance
    log.info('Starting AMI %s, user_data=%s' % (DefaultAmi, str(user_data)))
    try:
        instance = start_ami(DefaultAmi, user_data=user_data)
    except boto.exception.EC2ResponseError as e:
        log.critical('EC2 error: %s' % str(e))
        print('EC2 error: %s' % str(e))
        sys.exit(10)

    print('*'*80)
    print('* Started instance: %s' % instance.dns_name)
    print('*'*80)
    log.info('Started instance: %s' % instance.dns_name)
    log.debug('instance: %s' % str(dir(instance)))

    return instance
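# start_ami() is called above to boot the worker image.  A minimal
# sketch, assuming boto 2.x; the instance type and key name are
# placeholders, not values taken from this section.
def start_ami(ami, user_data=None):
    """Start an EC2 instance from 'ami' and return it once it leaves
    the 'pending' state (sketch)."""
    import time
    import boto

    ec2 = boto.connect_ec2()
    reservation = ec2.run_instances(ami, user_data=user_data,
                                    instance_type='m1.large',  # placeholder
                                    key_name='tsudat')         # placeholder
    instance = reservation.instances[0]
    while instance.state == 'pending':
        time.sleep(5)
        instance.update()    # refresh state from EC2
    return instance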
def bootstrap():
    """Bootstrap the TsuDAT run into existence.

    The following globals are used (all are strings):
    User      user name
    Project   the TsuDAT project
    Scenario  the scenario
    Setup     the run setup ('trial', etc)
    BaseDir   base of the tsudat working directory
    """

    log.info('bootstrap start, user_data globals:')
    log.info('   User=%s' % User)
    log.info('   Project=%s' % Project)
    log.info('   Scenario=%s' % Scenario)
    log.info('   Setup=%s' % Setup)
    log.info('   BaseDir=%s' % BaseDir)
    log.info('   Debug=%s' % Debug)
    log.info('   Instance=%s' % Instance)

    send_sqs_message(status=StatusStart)

    # get name of ZIP working file
    zip_name = DataFileFormat % (User, Project, Scenario, Setup)

    # get an S3 connection
    s3 = s3_connect()

    # load the input data files from S3
    key_str = '%s/%s' % (InputS3DataDir, zip_name)
    log.info('Loading %s from S3 ...' % key_str)
    bucket = s3.get_bucket(S3Bucket)
    if bucket is None:
        abort("Can't find bucket '%s'" % S3Bucket)
    try:
        key = bucket.get_key(key_str)
    except S3ResponseError:
        abort("Can't find key '%s' in bucket '%s'" % (key_str, S3Bucket))
    if key is None:
        abort("Can't find key '%s' in bucket '%s'" % (key_str, S3Bucket))
    key.get_contents_to_filename(InputZipFile)
    log.info('Done')

    # unzip the input data ZIP file into the local directory
    log.debug('Unzipping %s ...' % InputZipFile)
    z = zipfile.ZipFile(InputZipFile)
    z.extractall(path='/')
    if not Debug:
        os.remove(InputZipFile)
    log.debug('Done')

    # now load any generated data from a previous run
    key_str = '%s/%s' % (OutputS3DataDir, zip_name)
    log.info('Trying to load %s from S3 ...' % key_str)
    try:
        bucket = s3.get_bucket(S3Bucket)
        key = bucket.get_key(key_str)
        if key:
            key.get_contents_to_filename(OutputZipFile)
            log.info('Done')
            # unpack generated data into working directory
            log.debug('Unzipping %s ...' % OutputZipFile)
            z = zipfile.ZipFile(OutputZipFile)
            z.extractall(path='/')
            if not Debug:
                os.remove(OutputZipFile)
            log.debug('Done')
        else:
            log.info('Previously generated data not found')
    except S3ResponseError:
        log.info('Previously generated data not found')

    # jigger the PYTHONPATH so we can import 'run_tsudat' from the S3 data
    new_pythonpath = os.path.join(BaseDir, User, Project, Scenario, Setup,
                                  ScriptsDirectory)
    sys.path.append(new_pythonpath)
    log.debug('Added additional import path=%s' % new_pythonpath)

    # get the code for the rest of the simulation
    import run_tsudat

    # get path to the JSON file in scripts dir, pass to run_tsudat()
    json_path = os.path.join(new_pythonpath, JSONFile)
    log.info('Running run_tsudat.run_tsudat()')
    gen_files = run_tsudat.run_tsudat(json_path, logger=run_tsudat_log)

    # add local log files to the 'log' entry
    gen_files['log'] = glob.glob('*.log')

    # before we possibly delete the gen_files['sww'], get output path
    # (the [1:] strips the leading '/' so the path is relative)
    save_zip_base = os.path.dirname(gen_files['sww'][0])[1:]
    log.debug('save_zip_base=%s' % save_zip_base)

    # if user data shows 'getsww' as False, remove 'sww' key from dictionary
    if not UserData.get('GETSWW', True):
        msg = 'Userdata says not to save SWW files, deleting...'
        log.info(msg)
        send_sqs_message(status=StatusLog, msg=msg)
        del gen_files['sww']

    # optionally dump returned file data
    if Debug:
        import pprint
        pp = pprint.PrettyPrinter(indent=4)
        gen_str = pp.pformat(gen_files)
        log.debug('Returned files:\n%s' % gen_str)

    # save generated data to a staging directory
    # want same pathname for each file as in input ZIP archive
    shutil.rmtree(save_zip_base, ignore_errors=True)    # just in case
    os.makedirs(save_zip_base)
    for key in gen_files:
        for f in gen_files[key]:
            log.debug('Copying %s -> %s' % (f, save_zip_base))
            shutil.copy2(f, save_zip_base)

    # ZIP the generated directory
    log.debug('zipping dir: %s' % save_zip_base)
    make_dir_zip(save_zip_base, OutputZipFile)

    # save generated directory back to S3
    s3_name = '%s/%s' % (OutputS3DataDir, zip_name)
    zip_size = os.path.getsize(OutputZipFile)
    zip_size_mb = float(zip_size) / (1024*1024)
    log.info('Saving %s (%.2fMB) to S3.' % (s3_name, zip_size_mb))
    try:
        bucket = s3.create_bucket(S3Bucket)
        key = bucket.new_key(s3_name)
        log.debug('Creating S3 file: %s/%s' % (S3Bucket, s3_name))
        key.set_contents_from_filename(OutputZipFile)
        log.debug('Done!')
        key.set_acl('public-read')
    except boto.exception.S3ResponseError as e:
        log.critical('S3 error: %s' % str(e))
        sys.exit(10)
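# Two helpers used by bootstrap() are not defined in this section.  Below
# are minimal sketches: make_dir_zip() using only the standard library
# (the original may shell out to 'zip' instead), and run_tsudat_log(),
# whose signature is an assumption based on the StatusLog idiom above.
def make_dir_zip(dirname, zippath):
    """Zip the tree under 'dirname' into 'zippath' (sketch)."""
    import os
    import zipfile

    zf = zipfile.ZipFile(zippath, 'w', zipfile.ZIP_DEFLATED)
    for (root, dirs, files) in os.walk(dirname):
        for fname in files:
            # zipfile strips the leading '/', so extractall(path='/')
            # in bootstrap() recreates the original absolute layout
            zf.write(os.path.join(root, fname))
    zf.close()

def run_tsudat_log(msg):
    """Forward a simulation log message (sketch, signature assumed)."""
    log.info(msg)
    send_sqs_message(status=StatusLog, msg=msg)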