def send_message(**kwargs):
    """Send a message to the server.

    kwargs  a dict of keyword arguments

    Send a JSON representation of the kwargs dict.
    Add the User, Project, Scenario, Setup, Instance, ProjectID and
    ScenarioID global values.
    """

    # add the global values
    kwargs['user'] = User
    kwargs['project'] = Project
    kwargs['scenario'] = Scenario
    kwargs['setup'] = Setup
    kwargs['instance'] = Instance
    kwargs['project_id'] = ProjectID
    kwargs['scenario_id'] = ScenarioID

    # add time as float and string (UTC, ISO 8601 format)
    kwargs['time'] = time.time()
    kwargs['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%SZ', time.gmtime())

    # get JSON string
    msg = json.dumps(kwargs)
    log.debug('message JSON: %s' % msg)

    amqp.post_server_message(msg)
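# Example (illustrative sketch, not part of the original module): assuming
# the globals hold values like User='fred' and Project='demo', a call
#     send_message(status=StatusStart)
# posts a JSON string of the form
#     {"status": ..., "user": "fred", "project": "demo", ...,
#      "time": 1300000000.0, "timestamp": "2011-03-13 07:06:40Z"}
# All field values above are made up for illustration.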
def make_dir_zip(dirname, zipname):
    """Make a ZIP file from a directory.

    dirname  path to directory to zip up
    zipname  path to ZIP file to create
    """

    log.debug('zip -q -r %s %s' % (zipname, dirname))
    os.system('zip -q -r %s %s' % (zipname, dirname))
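# A portable alternative (sketch only, not part of the original module):
# the same effect without shelling out to the external 'zip' binary, using
# just the standard library.  Assumes 'import os' and 'import zipfile' at
# module level; the name 'make_dir_zip_py' is hypothetical.
def make_dir_zip_py(dirname, zipname):
    """Pure-python equivalent of make_dir_zip() using zipfile."""
    zf = zipfile.ZipFile(zipname, 'w', zipfile.ZIP_DEFLATED)
    try:
        for (root, dirs, files) in os.walk(dirname):
            for fname in files:
                # store each file under its walked path, as 'zip -r' does
                # (zipfile strips any leading '/' from the archive name)
                zf.write(os.path.join(root, fname))
    finally:
        zf.close()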
def shutdown():
    """Shutdown this AMI."""

    log.debug('Debug is %s, instance is %sterminating immediately'
              % (str(Debug), 'not ' if Debug else ''))

    if Debug:
        wait_a_while()

    terminate_instance()
def shutdown():
    """Shutdown this AMI."""

    log.debug('Debug is %s, instance is %sterminating immediately'
              % (str(Debug), 'not ' if Debug else ''))

    if Debug:
        wait_a_while()

    send_sqs_message(status=StatusStop)
    os.system('sudo halt')
def terminate_instance():
    """Terminate the instance, release public IP."""

    # get public IP
    public_ip = get_public_ip()

    # disassociate the public IP from this instance, then terminate the instance
    cmd = '/usr/bin/euca-disassociate-address %s' % public_ip
    log.debug('Doing: %s' % cmd)
    with os.popen(cmd) as fd:
        result = fd.readline()
    log.debug('result: %s' % str(result))

    cmd = '/usr/bin/euca-terminate-instances %s' % Instance
    log.debug('Doing: %s' % cmd)
    with os.popen(cmd) as fd:
        result = fd.readline()
    log.debug('result: %s' % str(result))

    sys.exit(0)
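# Note (sketch): subprocess captures stderr and the exit status, which
# os.popen() above does not; an equivalent call might look like
#     p = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE,
#                          stderr=subprocess.STDOUT)
#     result = p.communicate()[0]
# This assumes 'import subprocess' and is illustrative only.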
def run_tsudat(json_data):
    """Run ANUGA on the Amazon EC2.

    json_data  the path to the JSON data file

    Returns the boto instance object for the running image.
    """

    # plug our exception handler into the python system
    sys.excepthook = excepthook

    # get JSON data and adorn project object with its data
    adorn_project(json_data)

    # default certain values if not supplied in JSON data
    default_project_values()

    # set logfile to be in run output folder
    if project.debug:
        log.log_logging_level = log.DEBUG
    log.log_filename = os.path.join(project.output_folder, 'ui.log')
    if project.debug:
        dump_project_py()

    # do all required data generation before EC2 run
    log.info('#'*90)
    log.info('# Launching simulation')
    log.info('#'*90)

    # copy all required python modules to scripts directory
    ec2_name = os.path.join(ScriptsDir, Ec2RunTsuDATOnEC2)
    log.debug("Copying EC2 run file '%s' to scripts directory '%s'."
              % (Ec2RunTsuDAT, ec2_name))
    shutil.copy(Ec2RunTsuDAT, ec2_name)

    for extra in RequiredFiles:
        log.info('Copying %s to S3 scripts directory' % extra)
        shutil.copy(extra, ScriptsDir)

    # dump the current 'project' object back into JSON, put in 'scripts'
    json_file = os.path.join(ScriptsDir, JsonDataFilename)
    log.info('Dumping JSON to file %s' % json_file)
    dump_json_to_file(project, json_file)
    dump_project_py()

    # bundle up the working directory, put it into S3
    zipname = ('%s-%s-%s-%s.zip'
               % (project.user, project.project,
                  project.scenario, project.setup))
    zip_tmp_dir = tempfile.mkdtemp(prefix='tsudat2_zip_')
    zippath = os.path.join(zip_tmp_dir, zipname)
    log.info('Making zip %s from %s' % (zippath, project.working_directory))
    make_dir_zip(project.working_directory, zippath)
    os.system('ls -l %s' % zip_tmp_dir)

    s3_name = os.path.join(project.InputS3DataDir, zipname)
    try:
        s3 = s3_connect()
        bucket = s3.create_bucket(project.S3Bucket)
        key = bucket.new_key(s3_name)
        log.info('Creating S3 file: %s/%s' % (project.S3Bucket, s3_name))
        key.set_contents_from_filename(zippath)
        log.info('Done!')
        key.set_acl('public-read')
    except boto.exception.S3ResponseError as e:
        log.critical('S3 error: %s' % str(e))
        print('S3 error: %s' % str(e))
        sys.exit(10)
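    # (Sketch) set_contents_from_filename() can fail transiently; a simple
    # hedged retry loop around the upload, illustrative only and assuming
    # 'import time', might look like:
    #     for attempt in range(3):
    #         try:
    #             key.set_contents_from_filename(zippath)
    #             break
    #         except boto.exception.S3ResponseError:
    #             time.sleep(2 ** attempt)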
    # clean up the local filesystem
    dir_path = os.path.join(project.working_directory, project.user)
#    log.debug('Deleting work directory: %s' % dir_path)
#    shutil.rmtree(dir_path)
    log.debug('Deleting zipped S3 data: %s' % zippath)
    shutil.rmtree(zip_tmp_dir, ignore_errors=True)    # zippath lives in the temp dir

    # WHEN WE NO LONGER NEED THE 'GETSWW' OPTION, DELETE ALL LINES:    #DELETE ME
    # for now, assume ['getsww': False] if project.getsww undefined    #DELETE ME
    try:                                                               #DELETE ME
        getsww = project.getsww                                        #DELETE ME
    except AttributeError:                                             #DELETE ME
        getsww = False                                                 #DELETE ME

    # get JSON for userdata
    user_data = {'User': project.user,
                 'Project': project.project,
                 'Scenario': project.scenario,
                 'Setup': project.setup,
                 'BaseDir': project.working_directory,
def bootstrap():
    """Bootstrap the TsuDAT run into existence.

    The following globals are used (all are strings):
    User      user name
    UserDir   the user's work directory
    Project   the TsuDAT project
    Scenario  the scenario
    Setup     the run setup ('trial', etc)
    BaseDir   base of the tsudat working directory
    """

    log.info('bootstrap start, user_data globals:')
    log.info(' User=%s' % User)
    log.info(' UserDir=%s' % UserDir)
    log.info(' Project=%s' % Project)
    log.info(' Scenario=%s' % Scenario)
    log.info(' Setup=%s' % Setup)
    log.info(' BaseDir=%s' % BaseDir)
    log.info(' Debug=%s' % Debug)
    log.info(' Instance=%s' % Instance)

    send_message(status=StatusStart)

    # jigger the PYTHONPATH so we can import 'run_tsudat' from the common filesystem
    new_pythonpath = os.path.join(UserDir, Project, Scenario, Setup,
                                  ScriptsDirectory)
    sys.path.append(new_pythonpath)
    log.debug('Added additional import path=%s' % new_pythonpath)

    # get the code for the rest of the simulation
    import run_tsudat

    # get path to the JSON file in scripts dir, pass to run_tsudat()
    json_path = os.path.join(new_pythonpath, JSONFile)
    log.info('Running run_tsudat.run_tsudat(%s)' % json_path)
    gen_files = run_tsudat.run_tsudat(json_path)

    # add local log files to the 'log' entry (*.log, *.out)
    output_path = os.path.join(UserDir, Project, Scenario, Setup,
                               OutputsDirectory)
    local_log_files = []
    local_logs = glob.glob('*.log')
    for l_l in local_logs:
        dst = os.path.join(output_path, l_l)
        shutil.copyfile(l_l, dst)
        local_log_files.append(dst)
    local_logs = glob.glob('*.out')
    for l_l in local_logs:
        dst = os.path.join(output_path, l_l)
        shutil.copyfile(l_l, dst)
        local_log_files.append(dst)
    gen_files['log'] = local_log_files

#    # before we possibly delete the gen_files['sww'], get output path
#    save_zip_base = os.path.dirname(gen_files['sww'][0])[1:]
#    log.debug('save_zip_base=%s' % save_zip_base)

    # if user data shows 'getsww' as False, remove 'sww' key from dictionary
    if not UserData.get('GETSWW', True):
        msg = 'Userdata says not to save SWW files, deleting...'
        log.info(msg)
        del gen_files['sww']

    # optionally dump returned file data
    if Debug:
        import pprint
        pp = pprint.PrettyPrinter(indent=4)
        gen_str = pprint.pformat(gen_files)
        log.debug('Returned files:\n%s' % gen_str)

    # convert gen_files to JSON and add to stopping message
    send_message(status=StatusStop, payload=gen_files)

    # stop this AMI
    log.info('run_tsudat() finished, shutting down')
    shutdown()
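# (Sketch) The two glob/copy loops in bootstrap() above could be factored
# into a single helper; 'copy_local_logs' is hypothetical and not part of
# the original module.
def copy_local_logs(patterns, output_path):
    """Copy files matching each glob pattern into output_path.

    patterns     iterable of glob patterns, e.g. ('*.log', '*.out')
    output_path  destination directory (must already exist)

    Returns the list of destination paths.
    """
    copied = []
    for pattern in patterns:
        for fname in glob.glob(pattern):
            dst = os.path.join(output_path, fname)
            shutil.copyfile(fname, dst)
            copied.append(dst)
    return copied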
msg = '\n'.join(error_msg)
print(msg)
error(msg)

# set userdata defaults, etc
if not UserData.get('DEBUG', ''):
    UserData['DEBUG'] = 'production'

# set globals: common use variables, debug and logging, etc
User = UserData['USER']
UserDir = UserData['USERDIR']
Project = UserData['PROJECT']
Scenario = UserData['SCENARIO']
Setup = UserData['SETUP']
BaseDir = UserData['BASEDIR']

if UserData['DEBUG'].lower() == 'debug':
    Debug = True
    level = log.DEBUG
else:
    Debug = False
    level = log.INFO
log = log.Log(LogFile, level=level)
log.debug('UserData=%s' % str(UserData))

# do it!
bootstrap()
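# Example (illustrative only): the UserData dict consumed above might look
# like
#     {'USER': 'fred', 'USERDIR': '/data/tsudat/fred', 'PROJECT': 'demo',
#      'SCENARIO': 'test', 'SETUP': 'trial', 'BASEDIR': '/data/tsudat',
#      'DEBUG': 'debug'}
# All values are made up; only the key names are taken from the code above.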
def bootstrap():
    """Bootstrap the TsuDAT run into existence.

    The following globals are used (all are strings):
    User      user name
    Project   the TsuDAT project
    Scenario  the scenario
    Setup     the run setup ('trial', etc)
    BaseDir   base of the tsudat working directory
    """

    log.info('bootstrap start, user_data globals:')
    log.info(' User=%s' % User)
    log.info(' Project=%s' % Project)
    log.info(' Scenario=%s' % Scenario)
    log.info(' Setup=%s' % Setup)
    log.info(' BaseDir=%s' % BaseDir)
    log.info(' Debug=%s' % Debug)
    log.info(' Instance=%s' % Instance)

    send_sqs_message(status=StatusStart)

    # get name of ZIP working file
    zip_name = DataFileFormat % (User, Project, Scenario, Setup)

    # get an S3 connection
    s3 = s3_connect()

    # load the input data files from S3
    key_str = ('%s/%s' % (InputS3DataDir, zip_name))
    log.info('Loading %s from S3 ...' % key_str)
    bucket = s3.get_bucket(S3Bucket)
    if bucket is None:
        abort("Can't find bucket '%s'" % S3Bucket)
    try:
        key = bucket.get_key(key_str)
    except S3ResponseError:
        abort("Can't find key '%s' in bucket '%s'" % (key_str, S3Bucket))
    if key is None:
        abort("Can't find key '%s' in bucket '%s'" % (key_str, S3Bucket))
    key.get_contents_to_filename(InputZipFile)
    log.info('Done')

    # unzip the input data ZIP file into the local directory
    log.debug('Unzipping %s ...' % InputZipFile)
    z = zipfile.ZipFile(InputZipFile)
    z.extractall(path='/')
    if not Debug:
        os.remove(InputZipFile)
    log.debug('Done')

    # now load any generated data from a previous run
    key_str = ('%s/%s' % (OutputS3DataDir, zip_name))
    log.info('Trying to load %s from S3 ...' % key_str)
    try:
        bucket = s3.get_bucket(S3Bucket)
        key = bucket.get_key(key_str)
        if key:
            key.get_contents_to_filename(OutputZipFile)
            log.info('Done')

            # unpack generated data into working directory
            log.debug('Unzipping %s ...' % OutputZipFile)
            z = zipfile.ZipFile(OutputZipFile)
            z.extractall(path='/')
            if not Debug:
                os.remove(OutputZipFile)
            log.debug('Done')
        else:
            log.info('Previously generated data not found')
    except S3ResponseError:
        log.info('Previously generated data not found')

    # jigger the PYTHONPATH so we can import 'run_tsudat' from the S3 data
    new_pythonpath = os.path.join(BaseDir, User, Project, Scenario, Setup,
                                  ScriptsDirectory)
    sys.path.append(new_pythonpath)
    log.debug('Added additional import path=%s' % new_pythonpath)

    # get the code for the rest of the simulation
    import run_tsudat

    # get path to the JSON file in scripts dir, pass to run_tsudat()
    json_path = os.path.join(new_pythonpath, JSONFile)
    log.info('Running run_tsudat.run_tsudat()')
    gen_files = run_tsudat.run_tsudat(json_path, logger=run_tsudat_log)

    # add local log files to the 'log' entry
    gen_files['log'] = glob.glob('*.log')

    # before we possibly delete the gen_files['sww'], get output path
    save_zip_base = os.path.dirname(gen_files['sww'][0])[1:]
    log.debug('save_zip_base=%s' % save_zip_base)

    # if user data shows 'getsww' as False, remove 'sww' key from dictionary
    if not UserData.get('GETSWW', True):
        msg = 'Userdata says not to save SWW files, deleting...'
        log.info(msg)
        send_sqs_message(status=StatusLog, msg=msg)
        del gen_files['sww']

    # optionally dump returned file data
    if Debug:
        import pprint
        pp = pprint.PrettyPrinter(indent=4)
        gen_str = pprint.pformat(gen_files)
        log.debug('Returned files:\n%s' % gen_str)

    # save generated data to a staging directory
    # want same pathname for each file as in input ZIP archive
    shutil.rmtree(save_zip_base, ignore_errors=True)    # just in case
    os.makedirs(save_zip_base)
    for key in gen_files:
        for f in gen_files[key]:
            log.debug('Copying %s -> %s' % (f, save_zip_base))
            shutil.copy2(f, save_zip_base)

    # ZIP the generated directory
    log.debug('zipping dir: %s' % save_zip_base)
    make_dir_zip(save_zip_base, OutputZipFile)

    # save generated directory back to S3
    s3_name = '%s/%s' % (OutputS3DataDir, zip_name)
    zip_size = os.path.getsize(OutputZipFile)
    zip_size_mb = float(zip_size) / (1024*1024)
    log.info('Saving %s (%.2fMB) to S3.' % (s3_name, zip_size_mb))
    try:
        bucket = s3.create_bucket(S3Bucket)
        key = bucket.new_key(s3_name)
        log.debug('Creating S3 file: %s/%s' % (S3Bucket, s3_name))
        key.set_contents_from_filename(OutputZipFile)
        log.debug('Done!')
        key.set_acl('public-read')
    except boto.exception.S3ResponseError as e:
        log.critical('S3 error: %s' % str(e))
        sys.exit(10)
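    # (Sketch) A post-upload sanity check, illustrative only: re-fetch the
    # key and compare sizes before relying on the stored archive.
    #     check = bucket.get_key(s3_name)
    #     if check is None or check.size != zip_size:
    #         log.critical('S3 upload of %s appears incomplete' % s3_name)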