Example #1
#SBATCH --clusters=serial
#SBATCH --time=96:00:00
#
# Use these directives if a job might take more than 2 days to execute

import os
import sys
import stat
import math

# Classes containing the basic SWEET compile/run options
from mule_local.JobGeneration import *
from mule.SWEETRuntimeParametersScenarios import *

# Create main compile/run options
jg = JobGeneration()

# Request dedicated compile script
jg.compilecommand_in_jobscript = False

# Wallclock time
max_wallclock_seconds = 2 * 24 * 60 * 60
ref_max_wallclock_seconds = 48 * 60 * 60

# Get Earth parameters (if necessary)
earth = EarthMKSDimensions()

#
# Run simulation on plane or sphere
#
# Basic plane options
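
The listing is truncated at this point. As a hedged sketch of how such a benchmark script typically continues, the lines below wire the wallclock limit computed above into the job options and write out a job directory plus the dedicated compile script. The attribute and method names (`jg.parallelization.max_wallclock_seconds`, `jg.gen_jobscript_directory()`, `jg.write_compilecommands()`) follow common SWEET/MULE benchmark scripts, but treat them as assumptions rather than the original continuation of this example.

# Sketch of a typical continuation (assumed attribute/method names):

# Apply the wallclock limit computed above to the job's parallelization options
jg.parallelization.max_wallclock_seconds = max_wallclock_seconds

# Write one job directory based on the current compile/runtime options
# (the directory name is chosen here purely for illustration)
jg.gen_jobscript_directory('job_bench_example')

# Since compilecommand_in_jobscript is False, emit the dedicated compile script
jg.write_compilecommands()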
Example #2
    def __load_job_raw_data(self, jobdir=None):
        """
		Parse all output.out files and extract all kind of job output information

		Return a dictionary with content from the job directories
		{
			#
			# Dictionary with data from job generation
			# (read from [jobdir]/jobgeneration.pickle)
			#
			'jobgeneration':	# From jobgeneration.pickle
			{
				'compile': [...],
				'runtime': [...],
				'parallelization': [...],
				'platforms_platform': [...],
				'platform_resources': [...],
			},
			'output':		# From output.out with prefix [MULE]
			{
				'simulation_benchmark_timings.main': [value],
				'simulation_benchmark_timings.main_simulationLoop': [value],
				[...]
			},
			'[filename(.pickle)]':
			{
				[...]
			}
			},
			'[filename(.pickle)]':
			{
				[...]
			}
		"""

        # Reset the raw data collected for this job directory
        self.__job_raw_data = {}

        if self.verbosity > 5:
            self.info("")
            self.info("Processing '" + jobdir + "'")
        """
		* Process 'output.out'
		"""
        if self.verbosity > 5:
            self.info("Loading job output file 'output.out'")
        outfile = jobdir + '/output.out'
        try:
            with open(outfile, 'r') as f:
                content = f.readlines()
                self.__job_raw_data['output'] = self.__parse_job_output(
                    content)

        except Exception as err:
            print("*" * 80)
            print("* WARNING: failed to open '" + outfile + "' (ignoring)")
            print("* " + str(err))
            print("*" * 80)
        """
		Process 'jobgeneration.pickle'
		"""
        # Load the job generation attributes from 'jobgeneration.pickle'
        jobgenerationfile = jobdir + '/jobgeneration.pickle'
        if self.verbosity > 5:
            self.info("Loading 'jobgeneration.pickle'")
        j = JobGeneration(dummy_init=True)
        self.__job_raw_data['jobgeneration'] = j.load_attributes_dict(
            jobgenerationfile)
        """
		Process other '*.pickle'
		"""
        import glob
        import pickle

        pickle_files = glob.glob(jobdir + '/*.pickle')

        # Iterate over all pickle files found in the job directory
        for picklefile in pickle_files:
            filename = os.path.basename(picklefile)
            tag = filename.replace('.pickle', '')

            # 'jobgeneration.pickle' was already processed above
            if tag == 'jobgeneration':
                continue

            if self.verbosity > 5:
                self.info("Loading pickle file '" + filename + "'")

            with open(picklefile, 'rb') as f:
                self.__job_raw_data[tag] = pickle.load(f)
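
To make the dictionary layout documented in the method's docstring concrete, here is a small self-contained sketch of how the raw data of a single job could be inspected once it has been loaded. It operates on a plain dict of the documented shape; `print_main_timings` and `job_raw_data` are hypothetical names used only for illustration and are not part of the mule API.

# Hypothetical consumer of the per-job raw data dictionary described in the
# docstring above; not part of the mule API.
def print_main_timings(job_raw_data):
    # 'output' holds key/value pairs parsed from output.out ([MULE] prefix)
    output = job_raw_data.get('output', {})
    for key in (
        'simulation_benchmark_timings.main',
        'simulation_benchmark_timings.main_simulationLoop',
    ):
        if key in output:
            print(key + ": " + str(output[key]))

    # 'jobgeneration' mirrors the attributes stored in jobgeneration.pickle
    jobgen = job_raw_data.get('jobgeneration', {})
    print("Job generation sections: " + ", ".join(sorted(jobgen.keys())))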