Example #1
 def _getUserDir(self, dir_type, tool = None, create=False):
     base_dir = dict(base='', config=User.CONFIG_DIR, cache=User.CACHE_DIR, output=User.OUTPUT_DIR, \
                      plots=User.PLOTS_DIR, processes=User.PROCESSES_DIR, \
                      scheduler_in=config.get(config.SCHEDULER_INPUT_DIR), \
                      scheduler_out=config.get(config.SCHEDULER_OUTPUT_DIR))
      
     if tool is None:
         bd = base_dir[dir_type]
         # concatenate relative paths only
         if bd and bd[0]=='/':
             dir_name = bd
         else:
             #return the directory where the tool configuration files are stored
             dir_name = os.path.join(self._getUserBaseDir(), bd)
     else:
         #It's too confusing if we create case sensitive directories...
         tool = tool.lower()
         #return the specific directory for the given tool            
         dir_name =  os.path.join(self._getUserBaseDir(), base_dir[dir_type], tool)
     
     #make sure we have a canonical path
     dir_name = os.path.abspath(dir_name)
     
     if create and not os.path.isdir(dir_name):
         #we are letting this fail in case of problems.
         utils.supermakedirs(dir_name, 0755)
         
     return dir_name
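
For orientation, a minimal usage sketch. It assumes that public getters such as getUserConfigDir() and getUserOutputDir() (exercised in the tests further down) delegate to _getUserDir; the resulting paths are illustrative only.

from evaluation_system.model.user import User

user = User()
# with the LOCAL layout this resolves to something like ~/evaluation_system/config/tool1;
# note that the tool name is lower-cased before the path is assembled
config_dir = user.getUserConfigDir('Tool1')
output_dir = user.getUserOutputDir('tool1')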
Example #2
    def test_central_directory_Creation(self):
        tmp_dir = tempfile.mkdtemp(__name__)
        config._config[config.BASE_DIR_LOCATION] = tmp_dir
        config._config[
            config.
            DIRECTORY_STRUCTURE_TYPE] = config.DIRECTORY_STRUCTURE.CENTRAL
        testUser = DummyUser(random_home=False, **Test.DUMMY_USER)

        dir1 = testUser.getUserBaseDir()
        self.assertEquals(
            dir1,
            os.path.join(config.get(config.BASE_DIR_LOCATION),
                         config.get(config.BASE_DIR), str(testUser.getName())))
        dir2 = testUser.getUserOutputDir('sometool')
        self.assertEquals(
            dir2,
            os.path.join(config.get(config.BASE_DIR_LOCATION),
                         config.get(config.BASE_DIR), str(testUser.getName()),
                         User.OUTPUT_DIR, 'sometool'))
        print dir2

        config.reloadConfiguration()

        if os.path.isdir(tmp_dir) and tmp_dir.startswith(
                tempfile.gettempdir()):
            #make sure the home is a temporary one!!!
            print "Cleaning up %s" % tmp_dir
            shutil.rmtree(tmp_dir)
Example #3
 def _getUserBaseDir(self):
     if self._dir_type == config.DIRECTORY_STRUCTURE.LOCAL:
         return os.path.join(self.getUserHome(), config.get(config.BASE_DIR))
     elif self._dir_type == config.DIRECTORY_STRUCTURE.CENTRAL:
         return os.path.join(config.get(config.BASE_DIR_LOCATION), config.get(config.BASE_DIR), str(self.getName()))
     elif self._dir_type == config.DIRECTORY_STRUCTURE.SCRATCH:
         return os.path.join(config.get(config.BASE_DIR_LOCATION), str(self.getName()), config.get(config.BASE_DIR))
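
In short, the three layouts only differ in how the base directory is assembled (BASE_DIR is 'evaluation_system' in the test configuration below):

LOCAL:   <user home>/evaluation_system
CENTRAL: <BASE_DIR_LOCATION>/evaluation_system/<user name>
SCRATCH: <BASE_DIR_LOCATION>/<user name>/evaluation_system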
Example #4
 def _run(self):
     root_path = config.get('project_data')
     # Setup argument parser
     args = self.args
     crawl_dir = args.path
     
     t1 = time.time()        
     sys.stderr.flush()
     # For miklip we want to use a prefix
     # TODO: Get this directly from config?
     project = config.get('project_name')
     path_prefix = ''
     if project.lower() == 'miklip':
         path_prefix = 'user-'
     user_root_path = os.path.join(root_path, path_prefix+User().getName()) 
     if crawl_dir:
         if root_path not in crawl_dir:
             raise Exception('You are only allowed to crawl data in this root path %s' % root_path)
     else:
         crawl_dir = user_root_path
     path = os.path.dirname(os.path.abspath(__file__))
     #script_path = path + '/../../../sbin/crawl_data'
     script_path = path + '/../../../sbin/solr_server path2ingest'
     print 'Please wait while the system is crawling your data'
     out = subprocess.Popen('/bin/bash '+script_path + ' ' + crawl_dir, shell=True)
     print out.wait()
     print 'Finished.\nCrawling took ' + str(time.time() - t1) + ' seconds'
Example #5
 def test_get(self):
     base_dir = config.get(config.BASE_DIR)
     self.assertEquals(base_dir, 'evaluation_system')
     self.failUnlessRaises(config.ConfigurationException, config.get,
                           'non-existing-key')
     self.assertEquals(config.get('non-existing-key', 'default-answer'),
                       'default-answer')
Example #6
 def test_reload(self):
     """Test we can reload the configuration"""
     config._config[config.BASE_DIR_LOCATION] = 'TEST'
     c1 = config.get(config.BASE_DIR_LOCATION)
     self.assertEquals(c1, 'TEST')
     config.reloadConfiguration()
     c2 = config.get(config.BASE_DIR_LOCATION)
     self.assertNotEquals(c1, c2)
Example #7
    def linkmydata(self,outputdir=None):
        """Link the CMOR Data Structure of any output created by a tool
           crawl the directory and ingest the directory with solr::
            :param outputdir: cmor outputdir that where created by the tool.
            :return: nothing
        """
        user = self._user
        workpath  = os.path.join(user.getUserBaseDir(),'CMOR4LINK')
        rootpath  = config.get('project_data')
        solr_in   = config.get('solr.incoming')
        solr_bk   = config.get('solr.backup')
        solr_ps   = config.get('solr.processing')
        
        # detect a tool-in-tool product name of the form <tool>%<id|none>%<project>%<product>
        toolintool = re.compile(r'^((?P<tool>[\w%]+)%(\d+|none)%(?P<project>[\w_]+)%(?P<product>[\w_]+)$)')
        # Maybe os.walk for multiple projects or products
        if len(os.listdir(outputdir)) == 1:
            project = os.listdir(outputdir)[0]
            # link?
        if len(os.listdir(os.path.join(outputdir,project))) == 1:
            product = os.listdir(os.path.join(outputdir,project))[0]
        new_product = '%s.%s.%s.%s' % (self.__class__.__name__.lower(),self.rowid,project,product)
        match = re.match(toolintool, product)
        if match:
            nproduct = match.group('product')
            nproject = match.group('project')
            ntool = '.%s' % match.group('tool')
            # five values need five placeholders (ntool already carries its leading dot)
            new_product = '%s%s.%s.%s.%s' % (self.__class__.__name__.lower(), ntool, self.rowid, nproject, nproduct)

        # Link section
        link_path = os.path.join(rootpath,'user-' + user.getName())
        if os.path.islink(link_path):
            if not os.path.exists(link_path):
                os.unlink(link_path)
                os.symlink(workpath, os.path.join(link_path))
                if not os.path.isdir(workpath): os.makedirs(workpath)
            workpath = os.path.join(os.path.dirname(link_path), os.readlink(link_path))
        else:
            if not os.path.isdir(workpath): os.makedirs(workpath)
            os.symlink(workpath, link_path)
        os.symlink(os.path.join(outputdir,project,product), os.path.join(workpath,new_product))
        
        # Prepare for solr
        crawl_dir=os.path.join(link_path,new_product)
        now = datetime.now().strftime('%Y-%m-%d_%H%M%S')
        output = os.path.join(solr_in,'solr_crawl_%s.csv.gz' %(now))
        
        # Solr part with move orgy
        SolrCore.dump_fs_to_file(crawl_dir, output)
        shutil.move(os.path.join(solr_in,output),os.path.join(solr_ps,output))
        hallo = SolrCore.load_fs_from_file(dump_file=os.path.join(solr_ps,output))
        shutil.move(os.path.join(solr_ps,output),os.path.join(solr_bk,output))
Example #8
 def setUp(self):
     os.environ['EVALUATION_SYSTEM_CONFIG_FILE'] = os.path.dirname(
         __file__) + '/test.conf'
     config.reloadConfiguration()
     self.solr_port = config.get('solr.port')
     self.solr_host = config.get('solr.host')
     # test instances, check they are as expected
     self.all_files = SolrCore(core='files',
                               host=self.solr_host,
                               port=self.solr_port)
     self.latest = SolrCore(core='latest',
                            host=self.solr_host,
                            port=self.solr_port)
     self.assertEquals(self.all_files.status()['index']['numDocs'], 0)
     self.assertEquals(self.latest.status()['index']['numDocs'], 0)
Example #9
    def test_config_file(self):
        """If a config file is provided it should be read"""
        import tempfile
        fd, name = tempfile.mkstemp(__name__, text=True)
        with os.fdopen(fd, 'w') as f:
            f.write('[evaluation_system]\n%s=nowhere\n' % config.BASE_DIR)

        self.assertEquals(config.get(config.BASE_DIR), 'evaluation_system')
        os.environ[config._DEFAULT_ENV_CONFIG_FILE] = name
        config.reloadConfiguration()
        self.assertEquals(config.get(config.BASE_DIR), 'nowhere')

        os.unlink(name)

        # check wrong section
        fd, name = tempfile.mkstemp(__name__, text=True)
        with os.fdopen(fd, 'w') as f:
            f.write('[wrong_section]\n%s=nowhere\n' % config.BASE_DIR)

        os.environ[config._DEFAULT_ENV_CONFIG_FILE] = name
        self.failUnlessRaises(ConfigurationException, reloadConfiguration)

        os.unlink(name)

        # check directory structure value
        fd, name = tempfile.mkstemp(__name__, text=True)
        with os.fdopen(fd, 'w') as f:
            f.write('[evaluation_system]\n%s=wrong_value\n' %
                    config.DIRECTORY_STRUCTURE_TYPE)

        os.environ[config._DEFAULT_ENV_CONFIG_FILE] = name
        self.failUnlessRaises(ConfigurationException, reloadConfiguration)

        os.unlink(name)

        # check $EVALUATION_SYSTEM_HOME gets resolved properly
        fd, name = tempfile.mkstemp(__name__, text=True)
        with os.fdopen(fd, 'w') as f:
            f.write('[evaluation_system]\n%s=$EVALUATION_SYSTEM_HOME\n' %
                    config.BASE_DIR)

        self.assertEquals(config.get(config.BASE_DIR), 'evaluation_system')
        os.environ[config._DEFAULT_ENV_CONFIG_FILE] = name
        config.reloadConfiguration()
        self.assertEquals(config.get(config.BASE_DIR),
                          '/'.join(__file__.split('/')[:-4]))

        os.unlink(name)
Example #10
def getVersion(src_file):

    retval = __version_cache.get(src_file, None)

    if retval is None:
        (dir_name, filename) = os.path.split(src_file)
        command = 'module load git > /dev/null 2> /dev/null;'
        if dir_name:
            command += 'cd %s 2> /dev/null;' % dir_name
        command += 'git config --get remote.origin.url;'
        command += 'git show-ref --heads --hash'
        options = config.get(config.GIT_BASH_STARTOPTIONS, '-lc')
        bash = ['/bin/bash', options, command]
        p = Popen(bash, stdout=PIPE, stderr=PIPE)

        (stdout, stderr) = p.communicate()

        try:
            lines = stdout.split('\n')
            repository = lines[-3]
            version = lines[-2]
        except Exception, e:
            if not stderr:
                stderr = str(e)

            log.warn("Could not read git version")
            log.debug("Error while reading git version:\n%s", stderr)

            repository = 'unknown'
            version = 'unknown'

        retval = (repository, version)
        __version_cache[src_file] = retval

    return retval
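
A minimal usage sketch: the helper shells out to git, so outside of a checkout both fields fall back to 'unknown'; the example values are placeholders.

repository, version = getVersion(__file__)
# e.g. ('https://example.invalid/some/repo.git', '<commit hash>') inside a git checkout,
# or ('unknown', 'unknown') when git or the repository information is unavailable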
Example #11
    def test_getters(self):
        """Test the object creation and some basic return functions"""
        self.assertEqual(Test.DUMMY_USER['pw_name'], self.user.getName())
        self.assertTrue(self.user.getUserHome().startswith(
            tempfile.gettempdir()))
        self.assertEqual(int(Test.runCmd('id -u')), self.user.getUserID())

        db = self.user.getUserDB()
        self.assertTrue(db is not None)
        baseDir = '/'.join(
            [self.user.getUserHome(),
             config.get(config.BASE_DIR)])
        self.assertEqual(baseDir, self.user.getUserBaseDir())
        tool1_cnfDir = os.path.join(baseDir, User.CONFIG_DIR, 'tool1')
        tool1_chDir = os.path.join(baseDir, User.CACHE_DIR, 'tool1')
        tool1_outDir = os.path.join(baseDir, User.OUTPUT_DIR, 'tool1')
        tool1_plotDir = os.path.join(baseDir, User.PLOTS_DIR, 'tool1')

        self.assertEqual(self.user.getUserScratch(),
                         '/tmp/scratch/%s' % self.user.getName())
        #check we get the configuration directory of the given tool
        self.assertEqual(tool1_cnfDir, self.user.getUserConfigDir('tool1'))
        self.assertEqual(tool1_chDir, self.user.getUserCacheDir('tool1'))
        self.assertEqual(tool1_outDir, self.user.getUserOutputDir('tool1'))
        self.assertEqual(tool1_plotDir, self.user.getUserPlotsDir('tool1'))
        #check we get the general directory of the tools (should be the parent of the previous one)
        self.assertEqual(os.path.dirname(tool1_cnfDir),
                         self.user.getUserConfigDir())
        self.assertEqual(os.path.dirname(tool1_chDir),
                         self.user.getUserCacheDir())
        self.assertEqual(os.path.dirname(tool1_outDir),
                         self.user.getUserOutputDir())
        self.assertEqual(os.path.dirname(tool1_plotDir),
                         self.user.getUserPlotsDir())
Example #12
def _preview_create(plugin_name, result):
    """
    This routine creates the preview and adds the created files
    to the result dictionary.
    :type plugin_name: str
    :param plugin_name: name of the referred plugin.
    :type result: meta_dict
    :param result: a meta dictionary describing the result files
    """
    
    todo_list = []
    result_list = []
    for file_name in result:
        metadata = result[file_name]
        todo = metadata.get('todo', '')

        if todo == 'copy':
            ext = os.path.splitext(file_name)[-1]
            target_name = _preview_unique_file(plugin_name, file_name, ext, metadata)
            todo_list.append((_preview_copy, file_name, target_name))
            metadata['preview_path'] = target_name
            result_list.append(target_name)
        elif todo == 'convert':
            target_name = _preview_unique_file(plugin_name, file_name, '.png', metadata)
            todo_list.append((_preview_convert, file_name, target_name))
            metadata['preview_path'] = target_name
            result_list.append(target_name)
        result[file_name] = metadata
        
    preview_path = config.get(config.PREVIEW_PATH)

    if preview_path.strip() and todo_list:
        p = Pool(config.NUMBER_OF_PROCESSES)
        p.map(utils.mp_wrap_fn, todo_list)
    return result_list
Example #13
def _preview_unique_file(plugin_name, file_name, ext, metadata):
    """
    This routine creates a unique filename for the preview
    :type plugin_name: str
    :param plugin_name: name of the referred plugin.
    :type file_name: str
    :param file_name: the file to create a preview name for
    :type ext: str
    :param ext: the extension of the file to be created
    :type metadata: dict
    :param metadata: the meta-data for the file, to access timestamp
    """
    path = config.get(config.PREVIEW_PATH)
    subdir = datetime.datetime.now().strftime('%Y%m%d')
    name = _preview_generate_name(plugin_name, file_name, metadata)
    name += ext
    full_path = os.path.join(path, subdir)
    full_name = os.path.join(full_path, name)
    
    if path.strip() and not os.path.isdir(full_path):
        utils.supermakedirs(full_path, 0777)
        
    if os.path.isfile(full_name):
        return _preview_unique_file(plugin_name, file_name, ext, metadata)
    
    return full_name
Example #14
    def setUp(self):
        os.environ['EVALUATION_SYSTEM_CONFIG_FILE'] = os.path.dirname(
            __file__) + '/test.conf'
        config.reloadConfiguration()
        self.solr_port = config.get('solr.port')
        self.solr_host = config.get('solr.host')
        # test instances, check they are as expected
        self.all_files = SolrCore(core='files',
                                  host=self.solr_host,
                                  port=self.solr_port)
        self.latest = SolrCore(core='latest',
                               host=self.solr_host,
                               port=self.solr_port)
        self.assertEquals(self.all_files.status()['index']['numDocs'], 0)
        self.assertEquals(self.latest.status()['index']['numDocs'], 0)

        # add some files to the cores
        supermakedirs('/tmp/some_temp_solr_core/', 0777)
        self.tmpdir = '/tmp/some_temp_solr_core'
        self.orig_dir = DRSFile.DRS_STRUCTURE[CMIP5]['root_dir']
        DRSFile.DRS_STRUCTURE[CMIP5]['root_dir'] = self.tmpdir

        self.files = [
            'cmip5/output1/MOHC/HadCM3/historical/mon/aerosol/aero/r2i1p1/v20110728/wetso2/wetso2_aero_HadCM3_historical_r2i1p1_190912-193411.nc',
            'cmip5/output1/MOHC/HadCM3/decadal2008/mon/atmos/Amon/r9i3p1/v20120523/tauu/tauu_Amon_HadCM3_decadal2008_r9i3p1_200811-201812.nc',
            'cmip5/output1/MOHC/HadCM3/decadal2009/mon/atmos/Amon/r7i2p1/v20110719/ua/ua_Amon_HadCM3_decadal2009_r7i2p1_200911-201912.nc',
            'cmip5/output1/MOHC/HadCM3/decadal2009/mon/atmos/Amon/r7i2p1/v20110819/ua/ua_Amon_HadCM3_decadal2009_r7i2p1_200911-201912.nc',
            'cmip5/output1/MOHC/HadCM3/decadal2009/mon/atmos/Amon/r7i2p1/v20110419/ua/ua_Amon_HadCM3_decadal2009_r7i2p1_200911-201912.nc'
        ]
        for f in self.files:
            abs_path = os.path.abspath(os.path.join(self.tmpdir, f))
            try:
                os.makedirs(os.path.dirname(abs_path))
            except:  # pragma nocover
                pass
            with open(abs_path, 'w') as f_out:
                f_out.write(' ')
        dump_file = self.tmpdir + '/dump1.csv'
        # add the files to solr
        SolrCore.dump_fs_to_file(self.tmpdir + '/cmip5', dump_file)
        SolrCore.load_fs_from_file(dump_file,
                                   abort_on_errors=True,
                                   core_all_files=self.all_files,
                                   core_latest=self.latest)

        self.fn = os.path.join(self.tmpdir, self.files[0])
        self.drs = DRSFile.from_path(self.fn)
Example #15
    def getUserScratch(self):
        """:returns: the path to the user's scratch directory.
:rtype: str"""  
        path = config.get(config.SCRATCH_DIR)

        path = path.replace('$USER', self.getName())

        return path
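
A small illustration of the substitution, assuming SCRATCH_DIR is configured as '/tmp/scratch/$USER' (the value used in the user tests above); the user name is a placeholder.

user = User()
print user.getUserScratch()   # -> /tmp/scratch/jdoe for a user named 'jdoe'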
Example #16
    def __init__(self,
                 core=None,
                 host=None,
                 port=None,
                 echo=False,
                 instance_dir=None,
                 data_dir=None,
                 get_status=True):
        """Create the connection pointing to the proper solr url and core.

:param core: The name of the core referred (default: loaded from config file)
:param host: the hostname of the Solr server (default: loaded from config file)
:param port: The port number of the Solr Server (default: loaded from config file)
:param echo: If True, show all urls before issuing them.
:param instance_dir: the core instance directory (if empty but the core exists it will get downloaded from Solr)
:param data_dir: the directory where the data is being kept (if empty but the core exists it will
get downloaded from Solr)"""

        if host is None:
            host = config.get(config.SOLR_HOST)
        if port is None:
            port = config.get(config.SOLR_PORT)
        if core is None:
            core = config.get(config.SOLR_CORE)

        self.solr_url = 'http://%s:%s/solr/' % (host, port)
        self.core = core
        self.core_url = self.solr_url + core + '/'
        self.echo = echo
        self.instance_dir = instance_dir
        self.data_dir = data_dir

        if get_status:
            st = self.status()
        else:
            st = {}
        if self.instance_dir is None and 'instanceDir' in st:
            self.instance_dir = st['instanceDir']
        if self.data_dir is None and 'dataDir' in st:
            self.data_dir = st['dataDir']
        else:
            self.data_dir = 'data'

        # Other Defaults
        import socket
        socket.setdefaulttimeout(20)
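
A hedged usage sketch mirroring the test setUp methods above; host and port values are placeholders.

core = SolrCore(core='files', host='localhost', port=8983)
print core.core_url       # -> http://localhost:8983/solr/files/
status = core.status()    # also queried once during __init__ unless get_status=False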
Example #17
    def _run(self):

        doc_path = self.args.docpath
        tex_file = self.args.tex_file
        tool = self.args.tool.lower()
        if not tex_file:
            print 'Can\'t find a .tex file in this directory!'
            return
        file_root = tex_file.split('.')[0]
        # copy folder to /tmp for processing
        new_path = '/tmp/%s/' % tool
        self.copy_and_overwrite(doc_path, new_path)

        # change path and run "htlatex" and "bibtex"
        os.chdir(new_path)
        cfg_file = os.path.dirname(
            __file__) + '/../../../../etc/ht5mjlatex.cfg'
        os.system('htlatex %s "%s"' % (new_path + tex_file, cfg_file))
        os.system('bibtex %s' % file_root)
        os.system('htlatex %s "%s"' % (new_path + tex_file, cfg_file))

        # open html file and remove <head> and <body> tags
        fi = open(os.path.join(new_path, file_root + '.html'))
        text = fi.read()
        text = re.sub("<head>.*?</head>", "", text, flags=re.DOTALL)
        text = text.replace('</html>', '')
        text = text.replace('<html>', '')
        text = text.replace('</body>', '')
        text = text.replace('<body>', '')

        figure_prefix = 'figures'
        # replace img src
        text = text.replace(
            'src="%s/' % figure_prefix,
            'style="width:80%;" src="/static/preview/doc/' + tool + '/')

        # remove too big sigma symbols
        text = text.replace('mathsize="big"', '')

        flat_page, created = FlatPage.objects.get_or_create(
            title=self.args.tool, url='/about/%s/' % tool)
        if created:
            flat_page.sites = [1]
        flat_page.content = text
        flat_page.save()

        # Copy images to website preview path
        preview_path = config.get('preview_path')
        dest_dir = os.path.join(preview_path, 'doc/%s/' % tool)
        self.copy_and_overwrite('%s/' % figure_prefix, dest_dir)
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        shutil.copyfile('%s.css' % tool, os.path.join(dest_dir,
                                                      '%s.css' % tool))
        # remove tmp files
        shutil.rmtree(new_path)
Example #18
    def __module_interaction(self, command, module_name):
        """
        Function to interact with the module interface
        """
        module_path = config.get('module_path', None)
        if module_path is None:
            logging.warning('Module path is not set. Module %s NOT %sed' % (module_name, command))
            return None

        cmd = os.popen('%s python %s %s' % (module_path, command, module_name))
        exec cmd in globals(), locals()
        return True
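
For illustration, a plausible call site from within the same class, assuming 'load'/'unload' style commands as the log message suggests; 'cdo' is a placeholder module name.

if self.__module_interaction('load', 'cdo') is None:
    logging.warning('Continuing without the cdo module')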
Example #19
    def __init__(self):

        self.logcache = config.get('esgf_logcache')
        # self.logcache= '/pf/b/b324029/cache'
        self.esgftmp = '%s/%s/' % (self.logcache, 'ESGF_CACHE')
        self.logpath = '%s/%s/' % (self.logcache, 'ESGF_LOG')
        self.esgf_server = config.get('esgf_server').split(',')
        self.hostname = socket.gethostname()

        self.certs = config.get('private_key')
        self.wget = config.get('wget_path')

        self.threadLimiter = threading.BoundedSemaphore(
            int(config.get('parallel_downloads')))
        self.rwlock = threading.Lock()

        self.gid = grp.getgrnam("bmx828").gr_gid
        try:
            os.makedirs(self.logpath, 0775)
        except OSError as exception:
            if exception.errno != errno.EEXIST:
                raise
Example #20
    def __init__(self, project, experiment, outpath, p2p=P2P()):
        self.project = {'project': project}
        self.experiment = experiment
        self.outpath = outpath
        self.show_facets = 'experiment'

        self.facets = {'project': self.project.values()[0], 'type': 'File'}
        self.fields = [
            'title', 'size', 'project', 'product', 'institute', 'model',
            'experiment', 'time_frequency', 'realm', 'variable', 'ensemble',
            'timestamp'
        ]
        self.prefix = 'esgf-'
        self.prepath = config.get('project_data')
Example #21
    def storeResults(self, rowid, results):
        """
        :type rowid: integer
        :param rowid: the row id of the history entry to which the results belong
        :type results: dict with entries {str : dict} 
        :param results: meta-dictionary with meta-data dictionaries assigned to the file names.
        """
        reg_ex = None

        preview_path = config.get(config.PREVIEW_PATH, None)

        # only build the relative-path regex when a preview path is configured;
        # re.escape(None) would fail otherwise
        if preview_path:
            expression = '(%s\\/*){1}(.*)' % re.escape(preview_path)
            reg_ex = re.compile(expression)

        for file_name in results:
            metadata = results[file_name]

            type_name = metadata.get('type', '')
            type_number = hist.Result.Filetype.unknown

            preview_path = metadata.get('preview_path', '')
            preview_file = ''

            if preview_path and reg_ex is not None:
                # We store the relative path for previews only.
                # Which allows us to move the preview files to a different folder.
                preview_file = reg_ex.match(preview_path).group(2)

            if type_name == 'plot':
                type_number = hist.Result.Filetype.plot
            elif type_name == 'data':
                type_number = hist.Result.Filetype.data

            h = hist.Result(history_id_id=rowid,
                            output_file=file_name,
                            preview_file=preview_file,
                            file_type=type_number)

            h.save()

            result_id = h.pk
            self._storeResultTags(result_id, metadata)
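
A sketch of the meta-dictionary shape storeResults expects, based on the docstring above and on the preview handling in _preview_create; file names and paths are illustrative.

results = {
    '/some/output/plot1.png': {'type': 'plot',
                               'preview_path': '/some/preview/20200101/plot1.png'},
    '/some/output/data1.nc': {'type': 'data'},
}
# presumably called on the plugin instance: self.storeResults(rowid, results)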
Example #22
    def __init__(self, uid = None, email = None):
        '''Creates a user object for the provided id. If no id is given, a user object for
the current user, i.e. the one that started the application, is created instead.

:type uid: int
:param uid: user id in the local system, if not provided the current user is used.
:type email: str
:param email: user's email address
'''
        self._dir_type = config.get(config.DIRECTORY_STRUCTURE_TYPE)

        if uid is None: 
            uid = os.getuid()
            
        self._userdata = None
        if isinstance(uid, basestring):
            self._userdata = pwd.getpwnam(uid)
        else:
            self._userdata = pwd.getpwuid(uid)
             
        if self._userdata is None:
            raise Exception("Cannot find user %s" % uid)
        
        if email is None:
            self._email = ''
        else:
            self._email = email
        
        self._userconfig = Config()
        # try to load the configuration the very first time
        self._userconfig.read([User.EVAL_SYS_DEFAULT_CONFIG, os.path.join(self._userdata.pw_dir, User.EVAL_SYS_CONFIG)])
        
        self._db = UserDB(self)
        
        row_id = self._db.getUserId(self.getName())

        if row_id:
            # let any failure while updating the login propagate
            self._db.updateUserLogin(row_id, email)
        else:
            self._db.createUser(self.getName(), email=self._email)
Example #23
    def set_default_options(self, user, cmdstring, outdir=None):
        """
        Sets the default options for a given user and a
        given command string.
        :param user: an user object
        :type user: evaluation_system.model.user.User 
        :param cmdstring: the command
        :type cmdstring: string
        """
        # read output directory from configuration
        if not outdir:
            outdir = user.getUserSchedulerOutputDir()
        email = user.getEmail()

        # we check if the user is external and activate batch mode
        django_user = User.objects.get(username=user.getName())
        if django_user.groups.filter(name=config.get(
                'external_group', 'noexternalgroupset')).exists():
            options = config.get_section('scheduler_options_extern')
        elif django_user.groups.filter(name='frevastud').exists():
            options = config.get_section('scheduler_options_frevastud')
        else:
            options = config.get_section('scheduler_options')
        self.scheduler_options = options
        # set the default options
        self.add_dash_option("D", outdir)
        if email:
            self.add_ddash_option("mail-user", email)
        self.set_cmdstring(cmdstring)

        self.source_file = options.pop('source', False)
        module_file = options.pop('module_command')
        self.add_module(module_file)

        for opt, val in options.iteritems():
            if opt.startswith('option_'):
                opt = opt.replace('option_', '')
                if val == 'None':
                    self.add_ddash_option(opt, None)
                else:
                    self.add_ddash_option(opt, val)
Example #24
    def test_preview_generation(self):
        d = '/tmp/preview.pdf'
        s = os.path.dirname(__file__) + '/test_output/vecap_test_output.pdf'
        pm._preview_copy(s, d)
        self.assertTrue(os.path.isfile(d))
        os.remove(d)
        d = '/tmp/preview.png'
        s = os.path.dirname(__file__) + '/test_output/test_image.png'
        pm._preview_copy(s, d)
        self.assertTrue(os.path.isfile(d))
        os.remove(d)

        d = '/tmp/preview.png'
        s = os.path.dirname(__file__) + '/test_output/test_image.png'
        pm._preview_convert(s, d)
        self.assertTrue(os.path.isfile(d))
        os.remove(d)

        r = pm._preview_generate_name('dummy', 'old_fn', {})
        self.assertIn('dummy', r)
        self.assertEqual(len(r), 14)
        ts = time.time()
        r = pm._preview_generate_name('murcss', 'old_fn', {'timestamp': ts})
        self.assertIn('murcss', r)
        self.assertIn(
            datetime.datetime.fromtimestamp(ts).strftime('%Y%m%d_%H%M%S'), r)

        u = pm._preview_unique_file('murcss', 'old_fn', 'pdf',
                                    {'timestamp': ts})
        self.assertIn(
            datetime.datetime.fromtimestamp(ts).strftime('%Y%m%d_%H%M%S'), u)
        self.assertIn('murcss', u)
        self.assertIn(config.get('preview_path'), u)

        r1 = os.path.dirname(__file__) + '/test_output/vecap_test_output.pdf'
        r2 = os.path.dirname(__file__) + '/test_output/test_image.png'
        result = {r1: {'todo': 'copy'}, r2: {'todo': 'convert'}}
        res = pm._preview_create('murcss', result)
        for r in res:
            self.assertTrue(os.path.isfile(r))
            os.remove(r)
Example #25
    def test_plugin_conf(self):
        import tempfile
        fd, name = tempfile.mkstemp(__name__, text=True)
        with os.fdopen(fd, 'w') as f:
            f.write("""
[evaluation_system]
base_dir=~

[plugin:pca]
plugin_path=$EVALUATION_SYSTEM_HOME/tool/pca
python_path=$EVALUATION_SYSTEM_HOME/tool/pca/integration
module=pca.api

[plugin:climval]
plugin_path=$EVALUATION_SYSTEM_HOME/tool/climval
python_path=$EVALUATION_SYSTEM_HOME/tool/climval/src
module=climval.tool

""")

        os.environ[config._DEFAULT_ENV_CONFIG_FILE] = name
        config.reloadConfiguration()
        plugins_dict = config.get(config.PLUGINS)
        self.assertEquals(set(plugins_dict), set(['pca', 'climval']))
        es_home = '/'.join(__file__.split('/')[:-4])
        self.assertEquals(config.get_plugin('pca', config.PLUGIN_PATH),
                          es_home + '/tool/pca')
        self.assertEquals(config.get_plugin('pca', config.PLUGIN_PYTHON_PATH),
                          es_home + '/tool/pca/integration')
        self.assertEquals(config.get_plugin('pca', config.PLUGIN_MODULE),
                          'pca.api')
        self.assertEquals(
            config.get_plugin('pca', 'not_existing', 'some_default'),
            'some_default')

        self.assertEquals(config.get_plugin('climval', config.PLUGIN_MODULE),
                          'climval.tool')
        os.unlink(name)
Example #26
    def _run(self):
        # defaults
        options = self.args
        last_args = self.last_args
        
        # check if tool is specified
        try:
            tool_name = last_args[0]
        except IndexError:
            return self.list_tools()

        # here we handle the tool pull request
        if options.pull_request:
            self.handle_pull_request(tool_name)
            return 0

        if options.repos_version:
            (repos, version) = pm.getPluginVersion(tool_name)
            print 'Repository and version of %s:\n%s\n%s' % (tool_name, repos, version)
            return 0    
        
        email = None
        
        unique_output = options.unique_output.lower() if options.unique_output else 'true'
        unique_output = unique_output not in ['false', '0', 'no']
        
        mode = options.batchmode.lower() if options.batchmode else 'false'
        batchmode = mode in ['true', '1', 'yes', 'on', 'web']
        if not batchmode and mode not in ['false', '0', 'no', 'off']:
            raise ValueError('batchmode must be one of: true, false, 1, 0, yes, no, on, off')
        
        # get the plugin
        if tool_name:
            caption = None
            
            if options.caption:
                caption = pm.generateCaption(options.caption, tool_name)    
            
            if options.save_config or options.save:
                tool_dict = pm.parseArguments(tool_name, self.last_args[1:])
                cfg_file_save = options.save_config 
                save_in = pm.writeSetup(tool_name, tool_dict, config_file=cfg_file_save)
                logging.info("Configuration file saved in %s", save_in)
            elif options.show_config:
                tool_dict = pm.parseArguments(tool_name, self.last_args[1:])
                print pm.getPluginInstance(tool_name).getCurrentConfig(config_dict=tool_dict)
           
            elif options.scheduled_id:
                scheduled_id = options.scheduled_id
                logging.debug('Running %s as scheduled in history with ID %i', tool_name, scheduled_id)
                if not options.dry_run: 
                    pm.runTool(tool_name, scheduled_id=scheduled_id,
                               unique_output=unique_output)
                                     
            else:
                # now run the tool
                (error, warning) = pm.getErrorWarning(tool_name)
                
                if warning:
                    log.warning(warning)
                    
                if error:
                    log.error(error)

                tool_dict = pm.parseArguments(tool_name, self.last_args[1:])
                
                logging.debug('Running %s with configuration: %s', tool_name, tool_dict)
                if not options.dry_run and (not error or DEBUG):
                    
                    # we check if the user is external and activate batchmode
                    django_user = User.objects.get(username=user.User().getName())
                    if django_user.groups.filter(name=config.get('external_group', 'noexternalgroupset')).exists():
                        batchmode = True
                    
                    if batchmode:
                        [id, file] = pm.scheduleTool(tool_name,
                                                     config_dict=tool_dict,
                                                     user=user.User(email=email),
                                                     caption=caption,
                                                     unique_output=unique_output)

                        print 'Scheduled job with history id', id
                        print 'You can view the job\'s status with the command squeue'
                        print 'Your job\'s progress will be shown with the command'
                        print 'tail -f ', file
                    else:
                        pm.runTool(tool_name, config_dict=tool_dict,
                                   caption=caption, unique_output=unique_output)
                        
                        # repeat the warning at the end of the run
                        # for readability don't show the warning in debug mode 
                        if warning and not DEBUG:
                            log.warning(warning)

            if self.DEBUG:
                logging.debug("Arguments: %s", self.last_args)
                import json
                logging.debug('Current configuration:\n%s', json.dumps(tool_dict, indent=4))
Example #27
from django.conf import settings
from evaluation_system.misc import config

import json,time
import django

SETTINGS = dict()

try:
    # Application definition
    SETTINGS['INSTALLED_APPS'] = (
        'django.contrib.flatpages',
        'django.contrib.auth',  # We need this to access user groups
    )
    SETTINGS['DATABASES'] = {
        'default': {
            'ENGINE': 'django.db.backends.mysql',
            'NAME': config.get(config.DB_DB),
            'USER': config.get(config.DB_USER),
            'PASSWORD': config.get(config.DB_PASSWD),
            'HOST': config.get(config.DB_HOST),   # Or an IP Address that your DB is hosted on
            'PORT': '3306',

        }
    }
    settings.configure(**SETTINGS)
    django.setup()
except:
    pass
Example #28
 def is_admin(self):
     if self.__is_admin is None:
         admins = config.get('admins', '')
         # if user.User().getName() in admins.split(','):
         self.__is_admin = user.User().getName() in admins.split(',')
     return self.__is_admin
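
The check relies on a comma-separated 'admins' entry in the configuration; a minimal sketch with placeholder user names:

# admins=jdoe,msmith      (hypothetical entry in the evaluation_system config)
# for a user whose getName() returns 'jdoe', is_admin() returns True and the
# result is cached in self.__is_admin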
Example #29
def reloadPlugins(user_name=None):
    """Reload all plug-ins. Plug-ins are then loaded first from the :class:`PLUGIN_ENV` environmental
variable and then from the configuration file. This means that the environmental variable has precedence
and can therefore overwrite existing plug-ins (useful for debugging and testing)."""
    if not user_name:
        user_name = User().getName()
    # reset all current plugins
#     for item in __plugins_meta.keys():
#         __plugins_meta.pop(item)
#     for item in __plugin_modules__.keys():
#         __plugin_modules__.pop(item)
#     for item in __plugins__.keys():
#         __plugins__.pop(item)
    __plugin_modules__ = py27.OrderedDict()  # we use an ordered dict; this allows plugins to be overridden
    __plugins__ = {}
    __plugins_meta = {}
    __plugin_modules_user__[user_name] = py27.OrderedDict()
    __plugins_user__[user_name] = py27.OrderedDict()
    __plugins_meta_user[user_name] = py27.OrderedDict()
    
    extra_plugins = list()
    if PLUGIN_ENV in os.environ:
        # now get all modules loaded from the environment
        for path, module_name in map(lambda item: tuple([e.strip() for e in item.split(',')]),
                                     os.environ[PLUGIN_ENV].split(':')):
            # extend path to be exact by resolving all "user shortcuts" (e.g. '~' or '$HOME')
            path = os.path.abspath(os.path.expandvars(os.path.expanduser(path)))
            if os.path.isdir(path):
                # we have a plugin_imp with defined api
                sys.path.append(path)
                # TODO this is not working as in the previous loop. Though we might just want to remove it,
                # as there seems to be no use for this info...
                __plugin_modules__[module_name] = os.path.join(path, module_name)
                extra_plugins.append(module_name)
            else:
                log.warn("Cannot load %s, directory missing: %s", module_name, path)

    # the same for user specific env variable
    if user_name:
        if PLUGIN_ENV+'_'+user_name in os.environ:
            # now get all modules loaded from the environment
            for path, module_name in map(lambda item: tuple([e.strip() for e in item.split(',')]),
                                         os.environ[PLUGIN_ENV+'_'+user_name].split(':')):
                # extend path to be exact by resolving all "user shortcuts" (e.g. '~' or '$HOME')
                path = os.path.abspath(os.path.expandvars(os.path.expanduser(path)))
                if os.path.isdir(path):
                    # we have a plugin_imp with defined api
                    sys.path.append(path)
                    # TODO this is not working as in the previous loop. Though we might just want to remove it,
                    # as there seems to be no use for this info...
                    __plugin_modules__[module_name] = os.path.join(path, module_name)
                    extra_plugins.append(module_name)
                else:
                    log.warn("Cannot load %s, directory missing: %s", module_name, path)

    # get the tools directory from the current one
    # get all modules from the tool directory
    plugins = list(config.get(config.PLUGINS))
    for plugin_name in plugins:
        py_dir = config.get_plugin(plugin_name, config.PLUGIN_PYTHON_PATH)
        py_mod = config.get_plugin(plugin_name, config.PLUGIN_MODULE)
        if os.path.isdir(py_dir):
            if py_mod in __plugin_modules__:
                file_path = __plugin_modules__[py_mod]+'.py'
                log.warn("Module '%s' is test being overwritten by: %s", py_mod, file_path)
            else:
                log.debug("Loading '%s'", plugin_name)
                sys.path.append(py_dir)
                __plugin_modules__[plugin_name] = os.path.join(py_dir, py_mod)
        else:
            log.warn("Cannot load '%s' directory missing: %s", plugin_name, py_dir)

    # new way of loading plugins
    import re
    reg = re.compile(r'__short_description__\s*=(.*)')
    r = re.compile(r'\'(.*)\'')
    r_2 = re.compile(r'\"(.*)\"')
    r_list = re.compile(r'\[(.*)\]')
    reg_class_name = re.compile(r'class\s*(.*)')
    # reg for categories
    cat_reg = re.compile(r'__category__\s*=(.*)')
    # reg for tags
    tag_reg = re.compile(r'__tags__\s*=(.*)')
    for plugin_name, plugin_mod in __plugin_modules__.iteritems():
        f = open(plugin_mod+'.py', 'r')
        description = None
        class_name = None
        category = None
        tags = None

        class_name_str = ''
        for line in f:
            description = re.search(reg, line)
            if description is not None:
                description_str = re.search(r, description.groups()[0])
                if description_str is None:
                    description_str = re.search(r_2, description.groups()[0])
                if description_str is not None:
                    description_str = description_str.groups()[0]
            # search for category
            category_search = re.search(cat_reg, line)
            if category_search:
                category = re.search(r, category_search.groups()[0])
                category = category.groups()[0]
            # search for tags
            tags_search = re.search(tag_reg, line)
            if tags_search:
                tags = re.search(r_list, tags_search.groups()[0])
                tags = list(eval(tags.groups()[0]))

            # search for classname
            class_name = re.search(reg_class_name, line)
            if class_name is not None:
                # TODO: Maybe this is not robust enough.
                # What if class inherits from other Base Class?
                if 'PluginAbstract' in class_name.groups()[0]:
                    class_name_str = re.sub(r'\(.*', '', class_name.groups()[0])
        if class_name_str != ''  and class_name_str.lower() not in __plugins_meta.keys():
            __plugins_meta[class_name_str.lower()] = dict(name=class_name_str,
                                                          plugin_class=class_name_str,
                                                          plugin_module=plugin_mod,
                                                          description=description_str,
                                                          user_exported=plugin_name in extra_plugins,
                                                          category=category,
                                                          tags=tags)
            __plugins__[class_name_str] = class_name_str
        elif class_name_str != '':
            log.warn("Default plugin %s is being overwritten by: %s",
                     class_name_str, __plugins_meta[class_name_str.lower()]['plugin_module']+'.py')
    sys.path = [p for p in munge(sys.path)]  
    
    __plugin_modules_user__[user_name] = __plugin_modules__
    __plugins_user__[user_name] = __plugins__
    __plugins_meta_user[user_name] = __plugins_meta
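
The environment override is parsed as colon-separated 'path,module' pairs; a hedged sketch of what the loop above accepts (paths and module names are made up):

# os.environ[PLUGIN_ENV] = '/home/jdoe/mytool/src,mytool.api:/opt/other/src,other.tool'
#  -> both directories are appended to sys.path and 'mytool.api' / 'other.tool'
#     are registered as extra, user-exported plugin modules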
Example #30
def scheduleTool(plugin_name, slurmoutdir=None, config_dict=None, user=None,
                 caption=None, unique_output=True):
    """Schedules  a tool and stores this "run" in the :class:`evaluation_system.model.db.UserDB`.
    
:type plugin_name: str
:param plugin_name: name of the referred plugin.
:type slurmoutdir: string 
:param slurmoutdir: directory for the output
:type config_dict: dict or metadict 
:param config_dict: The configuration used for running the tool. If is None, the default configuration will be stored, 
    this might be incomplete.
:type user: :class:`evaluation_system.model.user.User`
:param user: The user starting the tool
:type scheduled_id: int
:param scheduled_id: if the process is already scheduled then put the row id here
:type caption: str
:param caption: the caption to set.
"""
    
    plugin_name = plugin_name.lower()
    if user is None:
        user = User()
    
    p = getPluginInstance(plugin_name, user)
    complete_conf = None
    
    # check whether a scheduled id is given
    if config_dict is None:
        conf_file = user.getUserToolConfig(plugin_name)
        if os.path.isfile(conf_file):
            log.debug('Loading config file %s', conf_file)
            with open(conf_file, 'r') as f:
                complete_conf = p.readConfiguration(f)
        else:
            log.debug('No config file was found in %s', conf_file)
    if complete_conf is None:
        # at this stage we want to resolve our tokens and perform some kind of sanity check before going further
        complete_conf = p.setupConfiguration(config_dict=config_dict, recursion=True)

    log.debug('Schedule %s with %s', plugin_name, complete_conf)
    
    slurmindir = os.path.join(user.getUserSchedulerInputDir(), user.getName())
    if not os.path.exists(slurmindir):
        utils.supermakedirs(slurmindir, 0777)

    version_details = getVersion(plugin_name)
    rowid = user.getUserDB().storeHistory(p,
                                          complete_conf,
                                          user.getName(),
                                          History.processStatus.not_scheduled,
                                          version_details=version_details,
                                          caption=caption)
    
    # follow the notes
    followHistoryTag(rowid, user.getName(), 'Owner')

    # set the SLURM output directory
    if not slurmoutdir:
        slurmoutdir = user.getUserSchedulerOutputDir()
        slurmoutdir = os.path.join(slurmoutdir, plugin_name)

    if not os.path.exists(slurmoutdir):
        utils.supermakedirs(slurmoutdir, 0777)

    # write the SLURM file
    full_path = os.path.join(slurmindir, p.suggestSlurmFileName())
    with open(full_path, 'w') as fp:
        p.writeSlurmFile(fp,
                         scheduled_id=rowid,
                         user=user,
                         slurmoutdir=slurmoutdir,
                         unique_output=unique_output)
            
    # create the batch command
    command = ['/bin/bash',
               '-c',
               '%s %s %s\n' % (config.get('scheduler_command'),  # SCHEDULER_COMMAND,
                                        config.SCHEDULER_OPTIONS,
                                        #user.getName(),
                                        full_path)]

    # run this 
    logging.debug("Command: " + str(command))
    p = Popen(command, stdout=PIPE, stderr=STDOUT)
    (stdout, stderr) = p.communicate()

    logging.debug("scheduler call output:\n" + str(stdout))
    logging.debug("scheduler call error:\n" + str(stderr))
            
    # get the very first line only
    out_first_line = stdout.split('\n')[0]
            
    # read the id from stdout
    if out_first_line.split(' ')[0] == 'Submitted':
        slurm_id = int(out_first_line.split(' ')[-1])
    else:
        slurm_id = 0
        raise Exception('Unexpected scheduler output:\n%s' % out_first_line)
             
    slurm_out = os.path.join(slurmoutdir,
                             'slurm-%i.out' % slurm_id)
    
    # create a standard slurm file to view with less
    with open(slurm_out, 'w') as the_file:
        the_file.write('Certainly, your job is pending with id %i.\n' % slurm_id)
        the_file.write('You can get further information using the command squeue.\n')
        the_file.write('\nThis file was automatically created by the evaluation system.\n')
        the_file.write('It will be overwritten by the output of %s.\n' % plugin_name)
        
    # set the slurm output file 
    user.getUserDB().scheduleEntry(rowid, user.getName(), slurm_out)

    return rowid, slurm_out
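
A minimal usage sketch, assuming a plugin named 'murcss' (the name also appears in the preview tests above) and an already prepared configuration dictionary; the returned tuple is the history row id and the path of the pre-created SLURM output file.

row_id, slurm_out = scheduleTool('murcss',
                                 config_dict={'some_option': 'some_value'},  # illustrative
                                 caption='scheduled from a sketch',
                                 unique_output=True)
print 'history id:', row_id
print 'follow the job output with: tail -f', slurm_out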