Example #1
def initialize( ):
    """
    initialize SLIMpy environment
    """
    env = InstanceManager()
    
    from slimpy_base.Environment.hashTable import _HashTable
    from slimpy_base.Environment.GlobalVars import GlobalVars as _GlobalVars
    from slimpy_base.Environment.GraphManager import GraphManager
    from slimpy_base.Environment.KeyStone import KeyStone
    from slimpy_base.Core.MutiProcessUtils.EmploymentCenter import EmploymentCenter
    from slimpy_base.utils.Records import Records
    
    # singleton classes: any and all instances are the same within the same environment
    env['graphmgr'] = GraphManager
    # order matters: exit functions are called FIFO
    env['table'] = _HashTable
    env['record'] = Records
    env['slimvars'] = _GlobalVars
    env['keystone'] = KeyStone
    env['center'] = EmploymentCenter
    
    
    
    env.assure_new_instances(  )
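A minimal usage sketch (not part of the SLIMpy sources), assuming a working SLIMpy installation: once initialize() has registered the singleton classes, later lookups in the same environment return the shared instances.

# hedged usage sketch -- not from the SLIMpy code base
from slimpy_base.Environment.InstanceManager import InstanceManager

initialize()
env = InstanceManager()

slimvars = env['slimvars']   # shared GlobalVars instance
table = env['table']         # shared hash table
# within one environment, repeated lookups are expected to yield the same objects
assert env['slimvars'] is slimvars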
Example #2
def slimpy_variable_emitter(target, source, env):
    """
    add dependencies on SLIMpy global variables:
    if a variable is defined in slimvars and also specified in
    env, then SLIMpy adds it as a Value source
    """
    from slimpy_base.Environment.InstanceManager import InstanceManager
    slimvars = InstanceManager()['slimvars']

    for key in slimvars.keys():
        if env.has_key(key):
            par = env[key]
            val = env.Value((key, par))
            source.append(val)

    return target, source
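A sketch of how an emitter like this is typically attached to an SCons Builder; the builder name and action string below are placeholders, not SLIMpy's actual configuration.

# hedged example -- placeholder action and builder name
from SCons.Script import Builder, Environment

slim_bld = Builder(action='python $SOURCE $TARGET',
                   emitter=slimpy_variable_emitter)
env = Environment(BUILDERS={'SLIMpy': slim_bld})
# any slimvars key that is also set on 'env' now becomes an extra Value() source,
# so changing that variable triggers a rebuild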
Example #3
def windowm_mpi_runtime_map(command):

    _env = InstanceManager()

    nodemap = _env['slimvars'].get_node_map()
    params = []

    nodes = [node for node, _ in nodemap]
    files = command.params
    dist = even_dist(nodes, files, 0)
    for (container, node_dist) in izip(files, dist):

        if isinstance(node_dist, tuple):
            from_node, to_node = node_dist
            assert from_node in container.nodenames
            container.node_copy(to_node)
        else:
            from_node, to_node = node_dist, node_dist

        assert container.available_from(to_node)
        if to_node in ['localhost', '']:
            to_node = getfqdn()
        params.append("%s:%s" % (to_node, container.get_data(to_node)))

    return params, command.kparams
Example #4
class SLIMBuilder(object):
    """
    abstract class
    """
    #    log = Log()
    #    table = HashTable()
    #    slimvars = GlobalVars()

    env = InstanceManager()

    def isSource(self, val):
        """
        test if val is a DataContainer or id of a DataContainer and
        if that DataContainer is full
        """

        try:

            node = self.env['table'][val]
        except KeyError:
            return False
        if isinstance(node, DataContainer):

            return node.isfull()

        return False
Example #5
    def format(self, nodename, params, kparams):
        """
        builds the command string for the given node: the command name,
        the positional params, and the key="value" pairs, joined by spaces
        """
        env = InstanceManager()

        table = env['table']
        l = lambda a, b: str(a) + '="' + str(b) + '"'

        cs = [self.getCmnd()]
        push = cs.append

        for par in params:
            if isinstance(par, Node):
                src = table[par.getID()]
                par = src.get_data(nodename)
            push(par)

        for key, val in kparams.items():
            if isinstance(val, Node):
                val = val.get_data(nodename)
            push(l(key, val))
#        p = " ".join(map(str, params))
#        k =  " ".join(map(l , kparams.items()))
#        cs = " ".join([self.getCmnd(),p, k])

        return " ".join(map(str, cs))
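Illustration of the string this builds (the command and values are made up): plain params are appended as-is, Node params are resolved to their per-node data path through the hash table, and keyword params become key="value" pairs.

# illustrative only -- 'sfthr', 'in.rsf' and 0.1 are made-up values
# self.format('node01', ['in.rsf'], {'thr': 0.1})
# -> 'sfthr in.rsf thr="0.1"'     (assuming getCmnd() returns 'sfthr')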
Example #6
    def __new_instance__(self, name):
        '''
        create a new instance
        '''
        Singleton.__new_instance__(self, name)

        self.lock = RLock()

        with self:
            self._jobs_posted = 0
            self._jobs_finished = 0
            self._done = False
            self._error = False
            self._error_message = None
            self._pids = set()
            self._aborted_pids = set()
            self._waiting_list = set()
            self._event = Event()
            self.__node_names = []
            self.nodes = {}
            self._idle = set()
            self._fin = set()
            self._head_node = ("__main__", "__main__")

            self.env = InstanceManager()

        self._semaphore = None
Example #7
    def testprepend_datapath(self):
        env = InstanceManager()

        local_dp = 'local_dp'
        tmp_global_dp = 'tmp_global_dp'
        global_dp = 'global_dp'
        env['slimvars']['localtmpdir'] = local_dp
        env['slimvars']['globaltmpdir'] = tmp_global_dp
        env['slimvars']['datapath'] = global_dp

        itr = [
            [(None, None), None],
            [(False, None), None],
            [(True, None), None],
            [(None, True), None],
            [(True, True), local_dp],
            [(False, True), tmp_global_dp],
            [(None, False), global_dp],
            [(True, False), global_dp],
            [(False, False), global_dp],
        ]
        for (is_loc, is_tmp), ans in itr:

            res = Unix_Pipes.prepend_datapath(is_loc, is_tmp, 'com')
            if ans is None:
                self.assertFalse("DATAPATH" in res, "expected no datapath")
            else:
                self.assertTrue(ans in res, "expected %(ans)s got %(res)s" % vars())
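A sketch of calling the helper directly (the command string is made up), summarizing the table above: with is_tmp True, the local or global temporary directory is used depending on whether is_local is True or False; with is_tmp False the 'datapath' setting is used; the remaining combinations prepend no DATAPATH at all.

# hedged example -- 'sfspike n1=10' is a placeholder command
cmd = Unix_Pipes.prepend_datapath(True, True, 'sfspike n1=10')
# -> the command prefixed with a DATAPATH pointing at 'localtmpdir'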
Example #8
class Worker(Thread, Runner):
    """
    Worker runs jobs 
    """
    env = InstanceManager()

    def __init__(self, name, master_id, processor=0):

        self.master_id = master_id

        self.processor = processor
        self.env['center'].subscribe(name, processor)
        Thread.__init__(self, name=name)
        self.pid = 0

        self.thread_state = "init"

    def _get_ts(self):
        return self._thread_state

    def _set_ts(self, val):
        self._thread_state = val
        return
#        print self, val

    thread_state = property(_get_ts, _set_ts)

    ## print status of thread as a traceback
    def status(self):
        """
        print status of thread
        """
        if self.isAlive():
            f = _current_frames()[self._ident]
            traceback.print_stack(f)
        else:
            print "Not Alive"

    def run(self):
        '''
        run method needed by thread class
        calls self.main_loop
        '''
        self.thread_state = "running"
        self._ident = get_ident()
        self.env.current_env = self.master_id

        try:
            self.main_loop()
        except Exception, msg:
            #            print "worker calling error"
            self.env['center'].error = True, (str(self), str(msg))
            self.safe_release_lock()
            #            print self,'Thread Terminated'
            self.thread_state = "exit error"
            raise

        self.thread_state = "exit"

        return
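A usage sketch based on the WorkerTester example further below (the node name is a placeholder): constructing a Worker subscribes it to the job center, and since it is a Thread it is started with start().

# hedged usage sketch -- 'node01' is a placeholder node name
env = InstanceManager()
worker = Worker('node01', env.current_env, processor=0)
worker.start()    # executes Worker.run, which drives main_loop in its own thread
worker.status()   # prints a stack trace of the thread, or "Not Alive"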
Example #9
class MultiCoreRunner(presStack):
    """
    Runs commands from the graph. Uses two stacks to keep
    track of dependencies and may run several commands
    at the same time.
    """
    env = InstanceManager()

    def __init__(self):

        self.created_nodes = set()
        self.nodes = None
        presStack.__init__(self)

    def set_graph(self, graph):
        """
        runner.set_graph( graph )
        sets the graph to graph 
        """
        presStack.set_graph(self, graph)
        center = self.env['center']
        center.reset()

        self.nodes = {}

        if len(center.node_names) == 0:
            raise Exception("No nodes to work on")

        for nodename in center.node_names:

            proc_list = self.nodes.setdefault(nodename, [])
            ppn = len(proc_list)

            worker = Worker(nodename, self.env.current_env, processor=ppn)
            proc_list.append(worker)

        center.set_event()

        assert not center.done

    def run(self):
        """
        required by Thread
        """
        center = self.env['center']

        try:
            self.main_loop()
        except Exception, msg:
            center.error = True, msg

            self.safe_release_lock()
            center.abort_all()

            self.join_all()
            raise

        self.join_all()
        center.reset()
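A sketch of the intended call sequence (the graph object is assumed to be built elsewhere, e.g. by the graph manager): set_graph() resets the job center and creates one Worker per node and processor, and the run method drives the main loop, joins the workers and resets the center.

# hedged driver sketch -- 'graph' is assumed to come from the graph manager
runner = MultiCoreRunner()
runner.set_graph(graph)   # raises if the center knows of no nodes
runner.run()              # or runner.start(), if presStack is a Thread subclass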
Example #10
def action(signum, stack_frame):
    '''
    action to catch alarm
    '''
    print
    print "WallTime exceeded"
    print
    env = InstanceManager()
    env['center'].error = True
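A sketch of how a handler like this is normally wired up with the standard signal module; the 3600-second walltime below is an arbitrary example value.

import signal

signal.signal(signal.SIGALRM, action)   # 'action' is the handler defined above
signal.alarm(3600)                      # deliver SIGALRM once the walltime expires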
Example #11
    def __init__(self, inSpace, num_blocks):
        env = InstanceManager()
        np = env['slimvars']['np']
        if prod(num_blocks) != np:
            raise Exception(
                "num_blocks does not match mpi variable 'np', %(num_blocks)s != %(np)s "
                % vars())

        kparams = dict(num_blocks=tuple(num_blocks))

        LinearOperatorStruct.__init__(self, inSpace, **kparams)
Example #12
class Node(object):
    """
    Node class to represent data and commands in the hash table
    
    """

    env = InstanceManager()

    _objID = None
    __setstate__ = None
    name = "Node"

    def __init__(self, obj):
        """
        take any object and store it to the hashtable 
        while tracking it with this node instance
        """
        table = self.env['table']
        self._objID = table.append(obj)

    def get(self):
        """
        get object associated with this node from the hash Table
        """
        table = self.env['table']
        return table[self._objID]

    def getID(self):
        """
        return int id of node object
        """
        return self._objID

    id = property(getID)

    def __str__(self):
        return "(%s:%s)" % (self.name, self._objID)

    def __repr__(self):
        return self.__str__()

    def __getattr__(self, attr):

        return getattr(self.get(), attr)

    def __eq__(self, other):
        if isinstance(other, Node) and self.id == other.id:
            return True
        else:
            return False
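A usage sketch: wrapping an object stores it in the shared hash table and the Node keeps only the integer id; attribute access is forwarded to the stored object and two Nodes compare equal when their ids match.

# hedged usage sketch -- 'payload' stands in for any object SLIMpy would store
payload = {'n1': 10}
node = Node(payload)
node.getID()    # integer id assigned by env['table']
node.get()      # fetches the stored object back from the hash table
str(node)       # something like '(Node:42)', depending on the assigned id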
Example #13
class Runner(object):
    """
    Abstract class
    """
    #    log = Log()
    env = InstanceManager()
    cleaner = None
    created_nodes = None

    def set_graph(self, graph):
        raise NotImplementedError("Please use a subclass of RunnerBase")

#    def addSource(self, source):
#        """
#        add a source to the graph manager
#        """
#        self.env['graphmgr'].addSource(source)

    def add(self, commands):
        """
        simple helper function
        TODO: replace with reduce( lambda x, y: x + y, commands )
        """
        commands = list(commands)
        prev = commands.pop(0)
        if not commands:
            if prev.adder is None:
                return prev
            else:
                return prev + None

        while commands:
            next = commands.pop(0)
            prev = prev + next

        return prev

    def get_new_sources(self):
        if self.cleaner is None:
            cleaned_nodes = set()
        else:
            cleaned_nodes = self.cleaner.get_cleaned_nodes()

        if self.created_nodes is None:
            remaining = set()
        else:
            remaining = self.created_nodes.difference(cleaned_nodes)

        return remaining
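The TODO in add() refers to the usual reduce-based chaining; a sketch of that equivalent for two or more commands (it does not reproduce the special case of a single command whose adder is None):

# hedged sketch of the reduce equivalent mentioned in the TODO
from functools import reduce

def add_with_reduce(commands):
    # chain the commands with '+', as Runner.add does for two or more of them
    return reduce(lambda prev, nxt: prev + nxt, list(commands))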
Example #14
class RecordHelper( object):
    
    env = InstanceManager()
    
    def __init__(self, verbose_level, node, debug):
        
        self.msg_record_item = { 'verb':verbose_level,
                                 'node':node,
                                 'debug':debug}
        
    def write(self,msg):
        
        msg_record_item = self.msg_record_item.copy()
        msg_record_item['msg'] = msg
        
        self.env['record'].rec( msg_record_item )
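A usage sketch (the verbosity level, node name and debug list are placeholders): every write() call copies the fixed record fields, attaches the message and hands it to the shared Records instance.

# hedged usage sketch -- placeholder arguments
helper = RecordHelper(1, 'localhost', [])
helper.write('building graph')   # forwarded to env['record'].rec(...)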
Example #15
    def __new_instance__(self, name):
        """
        similar to __init__, but for a singleton:
        initializes the class when a new instance is created
        """

        Singleton.__new_instance__(self, name)

        #        self.log = Log()
        self.env = InstanceManager()

        self.graph = DiGraph()

        self.__breakpoints = set()
        self.__sources = set()
        self.__targets = set()

        self.runner = defaultRunner()
        self.builder = PipeBuilder()
Example #16
    def build(self, target, source, env):

        #        env['runtype'] = 'dryrun'
        env['verbose'] = ARGUMENTS.get('verbose', 0)

        debug = ARGUMENTS.get('debug', None)
        if debug:
            debug = debug.split(',')
        else:
            debug = []
        env['debug'] = debug
        env['logfile'] = None

        runner = dottestRunner()

        from slimpy_base.Environment.InstanceManager import InstanceManager
        __env__ = InstanceManager()
        __env__['graphmgr'].setRunner(runner)

        self.act(target, source, env)
Example #17
class BuilderBase(object):
    """
    abstract class
    """
    #    log = Log()
    #    table = HashTable()
    #    slimvars = GlobalVars()
    env = InstanceManager()

    def __init__(self):
        pass

    def build(self, g, targets, sources, breakpoints, **k):
        raise NotImplementedError(
            "Builder subclass must implement the build method")

    def isSource(self, val):
        """
        test if val is a DataContainer or id of a DataContainer and 
        if that DataContainer is full
        """

        try:

            node = self.env['table'][val]
        except KeyError:
            return False
        if isinstance(node, DataContainer):

            return node.isfull()

        return False

    def get_graph(self):
        return self._graph

    def set_graph(self, graph):
        self._graph = graph

    graph = property(get_graph, set_graph)
Example #18
class VectorFactory( object ):
    """
    factory class that wraps data in a subclass of
    DataContainer and puts that into a Vector
    """
    
    __metaclass__ = Profileable
    """
    Class used as a convenience method to contain various data formats and
    return a vector
    """
#    graphmgr = GraphManager()
    env = InstanceManager()
    
    @note("class is initialized by user")
    def __init__( self ):
        """
        nothing done in the init
        """
        pass
    
    def __call__( self, data ):
        """
        contain data into a subclass of dataContainer and put that into a vector
        """
        if isinstance(data, ADI):
            newdata = data.container

        else:
            newdata = contain( data )
            self.env['graphmgr'].add_source( newdata )

        return Vector( newdata )
    
    def __str__(self):
        return "<SLIMpy function: vector>"
    
    def __repr__(self):
        return "SLIMpy.VectorFactory( )"
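A usage sketch (the filename is a placeholder): the factory wraps raw data in a DataContainer and registers it as a graph source, while an existing ADI simply has its container reused.

# hedged usage sketch -- 'data.rsf' is a placeholder filename
vector = VectorFactory()
v = vector('data.rsf')   # contained, added as a source, and wrapped in a Vector
w = vector(v)            # an ADI argument reuses its existing container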
Example #19
    def __init__(self, *arg, **kw):
        '''
        
        '''
        new_kw = dict(version=make_version())
        new_kw.update(kw)

        OptionParser.__init__(self, *arg, **new_kw)

        self._required = set()

        self._types = {}
        self._defaults = {}
        self._prog_args = []

        self.env = InstanceManager()

        self.add_all_slim_options()

        from slimpy_base.setup.DEFAULTS import DEFAULTS

        self.set_defaults(**DEFAULTS)
Example #20
class WorkerTester(TestCase):
    env = InstanceManager()

    def testrun(self):

        worker = Worker("nodename", self.env.current_env, processor=1)


#        with self.env['center'] as center:

#            worker.start()

    def testsafe_release_lock(self):
        if NI: raise NotImplementedError("test not implemented")

    def test__str__(self):
        if NI: raise NotImplementedError("test not implemented")

    def testmain_loop(self):
        if NI: raise NotImplementedError("test not implemented")

    def testprint_(self):
        if NI: raise NotImplementedError("test not implemented")

    def testabort(self):
        if NI: raise NotImplementedError("test not implemented")

    def testrun_job(self):
        if NI: raise NotImplementedError("test not implemented")

    def test__init__(self):

        worker = Worker("nodename", self.env.current_env, processor=1)

        center = self.env['center']

        self.failUnless(("nodename", 1) in center.idle)
Example #21
class solver(object):
    """
    Abstract class
    """
    #    slimvars = GlobalVars()
    env = InstanceManager()

    #    log = env['record']

    def __init__(self):

        pass

    def solve(self, A, x):
        """
        abstract method: raises NotImplementedError
        """
        raise NotImplementedError("Must subclass the solver base class")

    def __str__(self):
        return "<SLIMpy abstract solver>"

    def __repr__(self):
        return "<SLIMpy abstract solver>"
Example #22
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with SLIMpy . If not, see <http://www.gnu.org/licenses/>.
"""

from slimpy_base.Core.User.Structures.Scalar import *
from numpy import real_if_close as numpy_real_if_close

from slimpy_base.Environment.InstanceManager import InstanceManager
from slimpy_base.utils.RegisterType import register_slimpy_func

## The SLIMpy global environment
__env__ = InstanceManager( )

## for * import 
__all__ = [ "scalar_max",
           "scalar_min",
           "real_if_close",
           "scalar_value",
           "scalar_mean" ]

def generate_new_scalar( one, other, func):
    """
    generate_new_scalar( one, other, func) -> scalar
    """
    scalar = Scalar( )
    trg_scal = Target( scalar )
    src_self = Source( one )
Example #23
class Vector( ADI ): 
    """
    When creating a SLIMpy Vector using the command
    vector(filename), several variables are initialized.
    The only input that the user should need is `file`,
    corresponding to a file on disk. The other variables are mostly for internal use.
    If file is not None, a vector instance is created with a pointer to the file `file`.
    If create is given as a list of integers, a vector with a temporary
    name and the given size will be created.
    There are easier-to-use functions, such as zeros, to create a file.
    """
    ## for string
    name = "Vector"

    ## slimpy global variables
    env = InstanceManager()
    
    ## vector implements  VectorType
    __metaclass__ = VectorType
    
    
    def __new__( cls, *args ): #IGNORE
        if args and isinstance(args[0], ndarray):
            from slimpy_base.User.AumentedMatrix.AugVector import AugVector
            avec = AugVector( args[0] ).ravel()
            for idx, container in enumerate( avec):
                avec[idx] = Vector( container )
            return avec
        else:
            return object.__new__( cls, *args )
    
    ## Constructor from a data container
    def __init__( self, container ):
        """
        data - must be a data_container.
        
        """
        ADI.__init__( self, container )
        self.__transpose_flag = False
        self.__sorted_vector = None
        
    ##########################################################################
    ###                      OPERATOR OVERLOADS                            ###
    ##########################################################################
    
    def _get_transp_flag(self):
        return self.__transpose_flag
    def _set_transp_flag(self,val):
        self.__transpose_flag = bool(val)
    
    ## test if vector is transposed    
    is_transp = property( _get_transp_flag, _set_transp_flag )
     


    def __setitem__( self, index, item ):
        """
        Sets an item in the vector dictionary
        """
        raise Exception, "not implemented. Ask me to do it later"
    
    ## returns a @ref slimpy_base.Core.User.Structures.VectorSpace.VectorSpace "VectorSpace"  representing this vector
    def getSpace( self ):
        """
        returns a Space object that contains all of the information
        about this vector
        """
        from slimpy_base.Core.User.Structures.VectorSpace import VectorSpace
        
        return VectorSpace( self.getParameters( ) )
    
    ## getter property for getSpace  
    space = property( getSpace )
                
# Overloads [i:j] Returns a __numarray array from index i to j from the binary file
#    def __getslice__(self,start,stop,step=1):
#        """
#        Returns a numarray array from index i to j from the binary file.
#        If j is less than i or i is greater than the length of the vector, returns a 
#        numarray array of zero elements. 
#        Otherwise it behaves like a list.
#        """
#        s = slice(start,stop,step)
#        
#        return self.genScalar('getslice',s)
        #raise Exception , "not implemented. Ask me to do it later"

    # Overloads [i:j] Writes a __numarray array from index i to j to the binary file
    
    ## set a slice of this data inplace
    def __setslice__( self, i, j, data ):
        """
        Writes a numarray array from index i to j to the binary 
        file corresponding to the data_container.
        """
        raise Exception , "not implemented. Ask me to do it later"

    ## @name Vector Transformation Operations
    # 
    # @{

    ## Overload the '+' __operator if other can be a scalar or another vector
    # for self + other invokes math()
    def __add__( self, other ):
        """
        vec1.__add__(vec2) <==> vec1 + vec2
        """
        if other is 0:
            return self
        if other is self:
            return self *2
        par = other
        if isinstance( other, Vector ):
            par = Source( other.getContainer() )
            return self.generateNew( 'add', vec=par )
        else:
            par = self.source_or_num( par )
            return self.generateNew( 'add', par )
        
    ## Overload the '+' __operator for other + self
    def __radd__( self, other ):
        """
        vec1.__radd__(vec2) <==> vec2 + vec1
        vec2 may be a scalar or a vector
        """
    
        return ( self + other )

    ## Overload the '-' __operator if other can be a scalar or another vector
    def __neg__( self ):
        """
        vec1.__neg__() <==> -vec1
        """
        return self.generateNew( 'neg' )

    ## Overload the '-' __operator if other can be a scalar or another vector
    def __sub__( self, other ):
        """
        vec1.__sub__(vec2) <==> vec1 - vec2
        vec2 may be a scalar or a vector
        """
        if other is 0:
            return self
        if other is self:
            return self.space.zeros()
        par = other
        if isinstance( other, Vector ):
            par = Source( other.getContainer() )
            return self.generateNew( 'sub', vec=par )
        else:
            par = self.source_or_num( par )
            return self.generateNew( 'sub', par )
            
    ## Overload the '-' __operator
    # if other can be a scalar or another vector for other - self
    def __rsub__( self, other ):
        """
        vec1.__rsub__(vec2) <==> vec2 - vec1
        vec2 may be a scalar or a vector
        """
        
        par = other
        if isinstance( other, Vector ):
            par = Source( other.getContainer() )
            return self.generateNew( 'rsub', vec=par )
        else:
            par = self.source_or_num( par )

            return self.generateNew( 'rsub', par )

    ## Overload the '/' __operator if other can be a scalar or another vector
    def __div__( self, other ):
        """
        vec1.__div__(vec2) <==> vec1 / vec2
        vec2 may be a scalar or a vector
        """
        if other is 1:
            return self
        if other is self:
            return self.space.ones() 
        par = other
        if isinstance( other, Vector ):
            par = Source( other.getContainer() )
            return self.generateNew( 'div', vec=par )
        else:
            par = self.source_or_num( par )

            return self.generateNew( 'div', par )

    ## Overload the '/' __operator if other can be a scalar or another vector
    def __rdiv__( self, other ):
        """
        vec1.__rdiv__(vec2) <==> vec2 / vec1
        vec2 may be a scalar or a vector
        """
        par = other
        if isinstance( other, Vector ):
            par = Source( other.getContainer() )
            return self.generateNew( 'rdiv', vec=par )
        else:
            par = self.source_or_num( par )
            return self.generateNew( 'rdiv', par )

    ## Overload the '*' __operator if other can be a scalar or another vector
    def __mul__( self, other ):
        """
        vec1.__mul__(vec2) <==> vec1 * vec2
        vec2 may be a scalar or a vector
        """
        if other is 0:
            return 0
        if other is 1:
            return self
        if other is self:
            return self ** 2
        
        par = other
        if isinstance( other, Vector ):
            
            if self.is_transp ^ other.is_transp:
                if self.is_transp:
                    return inner_product( self, other )
                else:
                    return outer_product( self, other )
                
            par = Source( other.getContainer() )
            return self.generateNew( 'mul', vec=par )
        else:
            par = self.source_or_num( par )

            return self.generateNew( 'mul', par )
        
    ## Overload the '*' __operator if other can be a scalar or another vector
    def __rmul__( self, other ):
        """
        vec1.__rmul__(vec2) <==> vec2 * vec1
        """
        return self.__mul__( other )

    ## Overload the '**' __operator if other can be a scalar or another vector
    def __pow__( self, other ):
        """
        vec1.__pow__(vec2) <==> vec1 ** vec2
        vec2 may be a scalar or a vector
        """
        if other is 1:
            return self
        par = other
        if isinstance( other, Vector ):
            par = Source( other.getContainer() )
            return self.generateNew( 'pow', vec=par )
        else:
            par = self.source_or_num( par )

            return self.generateNew( 'pow', par )

    ## Overload abs() 
    def __abs__( self ):
        """
        vec1.__abs__() <==> abs(vec1)
        """
        return self.generateNew( 'abs' )

    ##########################################################################
    # Boolean comparison operators 
    ##########################################################################
    
    ## boolean comparison
    def __or__(self, other):
        """
        vec1 | vec2 -> vec3
        """
        return spboolean.or_( self, other )
    
    ## boolean comparison
    def __and__(self,other):
        "vec1 & vec2 -> vec3"
        return spboolean.and_( self, other )
    
    ## boolean comparison
    def __lt__(self,other):
        """
        vec1.__lt__(vec2) <==> vec1 < vec2
        """
        return spboolean.less_than( self, other )

    ## boolean comparison
    def __gt__(self,other):
        """
        vec1.__gt__(vec2) <==> vec1 > vec2
        """
        return spboolean.greater_than( self, other )

    ## boolean comparison
    def __le__(self,other):
        """
        vec1.__le__(vec2) <==> vec1 <= vec2
        """
        return spboolean.less_than_eq(self, other)
    
    ## boolean comparison
    def __ge__(self,other):
        """
        vec1.__ge__(vec2) <==> vec1 >= vec2
        """
        return spboolean.greater_than_eq(self, other)

    ## boolean comparison
    def __eq__(self,other):
        """
        vec1.__eq__(vec2) <==> vec1 == vec2
        """
        return spboolean.equal(self, other)
    
    ## boolean comparison
    def __ne__(self,other):
        """
        vec1.__ne__(vec2) <==> vec1 != vec2
        """
        return spboolean.not_equal( self, other )

    ## thresholding operation
    # @param obj can be a Scalar or vector
    # @param mode one of 'soft', 'hard' or 'nng'
    def thr( self, obj, mode='soft' ):
        """
        Returns a thresholded vector.
        obj - may be a scalar or a vector.
        mode - may be 'soft', 'hard' or 'nng'
        """
        par = obj
        if isinstance( obj, Vector ):
            par = Source( obj.getContainer() )
            return self.generateNew( 'thr' , mode=mode, fthr=par )
        else:
            par = self.source_or_num( par )

            return self.generateNew( 'thr' , mode=mode, thr=par )

    ## hard thresholding
    def thrhard( self, obj ):
        """
        Returns a thresholded vector.
        obj - may be a scalar or a vector.
        Same as vector.thr(obj,'hard')
        """
        return self.thr( obj, mode='hard' )

    def garrote( self, obj ):
        """
        Returns a garroted vector.
        obj - may be a scalar or a vector.
        Same as vector.thr(obj,'nng')
        """
        return self.thr( obj, mode='nng' )        


    ## Sort a vector by absolute values
    # @param ascmode sort in ascending or descending order 
    # @param memsize define memory size of system 
    def sort( self, ascmode=False, memsize=None ):
        """
        Returns a vector with a sorted data set.
        """
        if memsize is None:
            memsize = self.env['slimvars']['memsize']
        
        return self.generateNew( "sort", memsize=memsize, ascmode=ascmode )

    ## real part of vector 
    def __real__( self ):
        """
        Returns a vector containing the real component of a complex vector.
        """
        if self.space.isComplex():
            return self.generateNew( 'real' )
        else:
            return self
       
    ## imaginary part of vector
    def __imag__( self ):
        """
        Returns a vector containing the imaginary component of a complex vector.
        """
        return self.generateNew( 'imag' )
    

    ## @}

    real =  __real__ 
    imag =  __imag__ 
    
    ##########################################################################
    ###                  END OF OPERATOR OVERLOADS                         ###
    ##########################################################################
    
    ## @name Scalar Reduction Operations
    # 
    # @{
    
    def __getitem__( self, index ):
        """
        If index is a string: Returns an item from one of the dictionaries. 
        Checks the vector dictionary first.
        If index is a number: Returns a number at index from the binary file.
        """
        
        if isinstance( index, (int,Scalar) ):
            ind = self.source_or_num(index)
            return self.scalar_reduction( 'getitem', ind )
        
        elif isinstance( index, str ):
            raise Exception, "not implemented. Ask me to do it later"
        
        elif isinstance( index, slice ):
            
            l = len( self.getSpace() )
            ind = index.indices( l )
            return self.scalar_reduction( 'getslice', ind )
        else:
            raise TypeError , "expected int or string got %s" % type( index )

    ## root mean square of vector
    # @return a slimpy_base.Core.User.Structures.Scalar.Scalar object
    def rms( self ):
        """    Returns the root mean square"""
        return self.scalar_reduction( 'rms' )

    ## maximum element of vector
    # @return a slimpy_base.Core.User.Structures.Scalar.Scalar object
    def max( self ):
        """    Returns the maximum value"""
        return self.scalar_reduction( 'max' )

    ## minimum element of vector
    # @return a slimpy_base.Core.User.Structures.Scalar.Scalar object
    def min( self ):
        """    Returns the minimum value"""
        return self.scalar_reduction( 'min' )

    ## mean value of vector
    # @return a slimpy_base.Core.User.Structures.Scalar.Scalar object
    def mean( self ):
        """    Returns the mean value"""
        return self.scalar_reduction( 'mean' )

    ## variance of vector
    # @return a slimpy_base.Core.User.Structures.Scalar.Scalar object
    def var( self ):
        """    Returns the variance"""
        return self.scalar_reduction( 'var' )
    
    ## standard deviation of vector
    # @return a slimpy_base.Core.User.Structures.Scalar.Scalar object
    def sdt( self ):
        """    Returns the standard deviation"""
        return self.scalar_reduction( 'std' )

    ## lval-norm of vector
    # @return a slimpy_base.Core.User.Structures.Scalar.Scalar object
    # @param lval power of the norm
    def norm( self, lval=2 ):
        """    Returns the lval norm of the vector """
        return self.scalar_reduction( 'norm', lval )
   
    ## ith order statistic of the vector (ith smallest element),
    # @param i index within size of vector
    def orderstat( self, i ):
        """
        Returns the ith order statistic of the vector (ith smallest element), 
        i.e. i=0 is smallest, i=1 second smallest, etc.)
        Negative indexing starts with the largest element, i.e. i=-1 is largest, 
        i=-2 second largest, etc.
        """
        if not 'sorted' in self.__dict__:
            self.sorted = self.sort( ascmode=False )
                    
        if i<0:
            i = len( self.getSpace() ) + i
        
        return self.sorted[i]            

    ## @}
    # 
    

    
    #===============================================================================
    # DEPRECATED    
    #===============================================================================
    
    ## @name Deprecated
    # these functions have been replaced by linear operators
    # @{
     
    def conj( self ):
        """
        return vector conjugate of data
        """
        if self.space.isComplex():
            return self.generateNew( 'conj' )
        else:
            return self
    
    ## gradient of the vector
    def grad( self ):
        """
        Returns a vector containing the gradient of the vector.
        """
        #return __comp([self.generateNew('halfderiv'),self.generateNew('halfderiv')])
        return self.generateNew( 'grad' )

    # NOISE
    def noise( self, mean=0, seed=None ):
        """
        Returns vector with added random noise.
        mean - noise mean
        seed - noise seed
        """
        param = dict( mean=mean, seed=seed )
        if seed is None:
            param.pop( 'seed', None )
        
        return self.generateNew( 'noise', **param )

    ## TRANSP
    # @TODO: Move transp to image
    # @deprecated  
    def transp( self, a1=1, a2=2, memsize=100 ):
        """
        transpose a dataset
        """
        if a1 == a2:
            return self

        return self.generateNew( 'transp', plane=( a1, a2 ), memsize=memsize )

    ## @}
    
            
    
    def _transpose(self):
        vec = Vector( self.container )
        vec.is_transp = not vec.is_transp
        return vec
    
    ## symbolic transpose of the vector 
    # @warning does not transpose underlying image  
    T = property( _transpose )

    def _conj_transp(self):
        
        vec = Vector(self.container).conj( )
        vec.is_transp = not self.is_transp
        return vec
    
    H = property( _conj_transp )
    
    ## sets the name of the file on disk
    def setName( self, name, path=None ):
        """
        set the name of this vector and make it persistent (not temporary).
        """
        if not path is None:
            name = join( path, name )
            
        container = self.getContainer()
        
        container.setName( name , path=None )
        self.env['graphmgr'].add_target( container )
        
        return self 
    
    ## plot vector 
    # @todo: should not be a vector method  
    def plot( self, command=None, plotcmd=None ):
        """
        calls plot method of this vectors container
        """
        try:
            self.flush()
        except:  #IGNORE:W0704
            pass #IGNORE:W0702
        self.container.plot( command=command, plotcmd=plotcmd )
    
    ## reshape underlying image
    # @todo: should not be a vector method
    def reshape(self, *shape):
        if len(shape) is 1 and isinstance(shape[0], (list,tuple)):
            shape = shape[0]
            
        return self.generateNew( "reshape" , shape=shape )
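A short usage sketch of the overloads above (the filename and numbers are placeholders): arithmetic and thresholding produce new vectors in the lazy graph, the reductions return Scalar objects, and the transpose flag turns '*' into an inner product.

# hedged usage sketch -- placeholder filename and values; 'vector' is assumed to be
# the VectorFactory instance from Example #18
x = vector('data.rsf')
y = (2 * x - x.rms()) / x.norm(2)
z = y.thr(0.1, mode='soft')   # soft thresholding
ip = x.T * y                  # inner product via the transpose flag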
Example #24
SLIMpy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with SLIMpy . If not, see <http://www.gnu.org/licenses/>.
"""

from unittest import TestCase, defaultTestLoader
from slimpy_base.Core.Interface.Structure import Structure
from slimpy_base.Core.Command.Command import Command
from slimpy_base.Environment.InstanceManager import InstanceManager

_env = InstanceManager()
NI = True


class StructureTester(TestCase):
    def testgenerate_command(self):

        struct = Structure()

        _env['slimvars']['keep_tb_info'] = False

        c1 = struct.generate_command('cmnd', a1=11)

        c2 = Command('cmnd', None, a1=11)

        self.failUnlessEqual(c1, c2)
Example #25
class Scalar(object):
    '''
    Scalar object contains class methods to work on rsf data
    '''

    env = InstanceManager()
    #    log = Log()
    re_float = re_compile('[-+]?\d*\.\d*')

    @classmethod
    def get_command(cls, cmd):
        '@deprecated: '
        if hasattr(cls, cmd):
            return cls(cmd)
        else:
            raise TypeError('No Scalar command "%s" in RSF ' % cmd)

    def __init__(self, cmd):
        if not hasattr(self, cmd):
            raise TypeError('No Scalar command "%s" in RSF ' % cmd)
        self._command_name = cmd

    def __call__(self, container, scalar, *args, **kw):
        '''
        the only non-classmethod: calls the method given by self._command_name
        '''
        if not hasattr(self, "_command_name"):
            raise AttributeError("scalar class not initialized")

        cmd = self._command_name
        attr = getattr(self, cmd)
        container.node_copy('localhost')
        attr(container, scalar, *args, **kw)

#    slimvars = GlobalVars()

    def __str__(self):
        return "rsf scalar method %s" % self._command_name

    def __repr__(self):
        return self.__str__()

    @staticmethod
    def rms(container, scalar):
        """    Returns the root mean square"""
        num = Scalar.attr(container, scalar, want='rms')
        scalar.set(num)

    @staticmethod
    def max(container, scalar):
        """    Returns the maximum value"""
        num = Scalar.attr(container, scalar, want='max')
        scalar.set(num)

    @staticmethod
    def min(container, scalar):
        """    Returns the minimum value"""
        num = Scalar.attr(container, scalar, want='min')
        scalar.set(num)

    @staticmethod
    def mean(container, scalar):
        """    Returns the mean value"""
        num = Scalar.attr(container, scalar, want='mean')
        scalar.set(num)

    @staticmethod
    def var(container, scalar):
        """    Returns the variance"""
        num = Scalar.attr(container, scalar, want='var')
        scalar.set(num)

    @staticmethod
    def sdt(container, scalar):
        """    Returns the standard deviation"""
        num = Scalar.attr(container, scalar, want='std')
        scalar.set(num)

    @staticmethod
    def norm(container, scalar, lval=2):
        """    Returns the lval norm of the vector """
        is_inf = lval == inf or (isinstance(lval, str)
                                 and lval.lower() == 'inf')  #IGNORE:E1103

        if is_inf:
            a1 = Scalar.attr(container, scalar, want='max')
            a2 = Scalar.attr(container, scalar, want='min')

            num = max(abs(a1), abs(a2))
        else:

            num = Scalar.attr(container, scalar, want='norm', lval=lval)

        scalar.set(num)

    @staticmethod
    def attr_make_number(atr):
        '''
        call eval on atr; if the result is a tuple, make a complex number
        @param atr: string 
        @type atr:str
        '''

        num = eval(atr)
        if isinstance(num, tuple):
            num = complex(*num)
        return num

    @staticmethod
    def attr(container, scalar, want='norm', lval=2):
        """
        fetches various attributes of the vector.
        vector will be flushed prior to the operation.
        TODO: make it so that the vector does not flush automatically. 
        """

        #        command = "%(path)s < %(data)s want=%(want)s lval=%(lval)s" %vars()
        command = ATTRMD(attr=SFATTR(),
                         file=container.get_data('localhost'),
                         want=want,
                         lval=lval)
        #define parameters for the pipe
        print >> __env__['record'](1, 'cmd'), command
        p0 = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
        err = p0.wait()
        if err:
            lines = p0.stderr.read()
            p0.stderr.close()
            p0.stdout.close()
            raise IOError(err, command + '\n' + lines)
        output = p0.stdout.read()
        p0.stderr.close()
        p0.stdout.close()

        n = output.split('=')[-1].split('at')[0]

        return Scalar.attr_make_number(n)

    @staticmethod
    def project(container, scalar, lamb=None):
        '''
        returns threshold value that will
        threshold the vector to a 1-norm of lamb
        '''
        if lamb is None:
            raise TypeError('Must specify "lamb=" value')

        command = PROJCMD(
            project=SFPROJECT(),
            file=container.get_data('localhost'),
            lamb=lamb,
        )

        p0 = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
        err = p0.wait()
        if err:
            lines = p0.stderr.read()
            p0.stderr.close()
            p0.stdout.close()
            raise IOError(err, command + '\n' + lines)
        n = p0.stdout.read()
        p0.stderr.close()
        p0.stdout.close()

        assert 'tau=' in n

        num = n.split('=')[-1].strip()
        scalar.set(float(num))

    @staticmethod
    def getitem(container, scalar, item):
        "get item-th element from the vector"
        if not isinstance(item, int):
            raise TypeError, "getitem got %(item)s expected int" % vars()

#        params = container.getParameters( )
        print >> __env__['record'](1, 'cmd'), "getitem( %s )" % (item)

        item = container.readbin(start=item, len=1)[0]

        l = lambda x: (
            (x == int32 and int) or (x == float32 and float) or
            (x == float64 and float) or (x == complex64 and complex))

        t = l(type(item))

        num = t(item)
        scalar.set(num)

        return
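A sketch of driving these static helpers through the callable interface (the container and result objects are assumed to exist already):

# hedged usage sketch -- 'container' and 'result' are assumed to exist
norm_cmd = Scalar.get_command('norm')   # deprecated lookup, returns Scalar('norm')
norm_cmd(container, result, lval=2)     # copies the data to localhost, then sets result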
Example #26
class rsf_data_container(DataContainer):
    """
    rsf_data_container - keeps track of "out of core" vectors and their
    corresponding binary files on disk.
    """
    suffix = ".rsf"
    psuffix = ".vpl"
    name = "rsf"
    #    slimvars = GlobalVars()
    #    log = Log()

    env = InstanceManager()

    COUNT = 0
    sfFactory = rsfCommandFactory()

    #    command = "%(path)s < %(data)s want=%(want)s lval=%(lval)s" %vars()
    _scalar_methods = ScalarMethods

    @classmethod
    def isCompatibleWith(cls, obj):
        '''
        class method to determine if 'obj' is an rsf file
        @param obj:
        @type obj: any object that would be 
            contained in a datacontainer class
        '''

        obj = str(obj)

        if obj.endswith(cls.suffix):
            if not isfile(obj):
                raise Exception, "the file %s can not be found" % (obj)
            return True
        if isfile(obj + cls.suffix):
            return True

        return False

    def __init__(self,
                 data=None,
                 parameters=None,
                 command=None,
                 tmp=None,
                 nodenames=None,
                 target_node=None):

        #        self.scalar = Scalar()
        self.SFRM = join(self.env['slimvars']['RSFBIN'], 'sfrm')
        self.SFGREY = join(self.env['slimvars']['RSFBIN'], 'sfgrey')
        self.SFMV = join(self.env['slimvars']['RSFBIN'], 'sfmv')
        self.XTPEN = join(self.env['slimvars']['RSFBIN'], 'xtpen')
        self.SFATTR = join(self.env['slimvars']['RSFBIN'], 'sfattr')
        self.SFDD = join(self.env['slimvars']['RSFBIN'], 'sfdd')

        self.PLOTCMD = Template("< ${data} ${plotcmd}  |  ${viewcmd} &")
        self.MVCMD = Template("${mv} ${From} ${To}")
        self.RMCMD = Template("${rm} ${file}")

        self.ATTRMD = Template("${attr} <  ${file} want=${want} lval=${lval} ")

        node_info = None

        if data is not None:
            data = str(data)
            data = abspath(data)
            if not data.endswith(self.suffix):
                data = data + self.suffix

            dname = dirname(data)
            data = basename(data)
            node_info = dict(location=dname)

        DataContainer.__init__(self,
                               data=data,
                               parameters=parameters,
                               command=command,
                               tmp=tmp,
                               nodenames=nodenames,
                               node_info=node_info,
                               target_node=target_node)

    def __setitem__(self, item, value):
        """
        """
        raise NotImplementedError

#        assert item == 'data', "can not set the item %s(item)s" % vars()

    def makeparams(self):
        '''
        returns a param object created from the data contained
        ie. the rsf header info
        '''
        par = DataContainer.makeparams(self)

        # RSF specific stuff
        try:
            format, data_type = par['data_format'].split('_')
        except KeyError:
            print self, 'has no key "data_format"'
            raise

        par['data_type'] = data_type

        return par

    def genname_helper(self, string):
        """
        formatting for better output files: no more tempnam, just 'slim.%prog.%cmd.%id.rsf',
        where %prog is the main program's name, %cmd is the last command
        on the pipe that created the data and
        %id is a unique incremented identifier
        """
        main_file = "interactive"

        main = modules['__main__']
        if hasattr(main, '__file__'):
            main_file = main.__file__

        prog = basename(main_file)[:-3]

        join_dot = lambda *args: ".".join(args)

        pid = str(getpid())
        cur_env = self.env.current_env

        make = lambda fcn, cnt: join_dot('slim', pid, cur_env, prog[:6],
                                         fcn[:5], "%.5d" % cnt) + self.suffix

        if exists(string):
            string = split(string)[1]
        self.__class__.COUNT += 1
        filename = make(string, self.COUNT)
        while exists(filename):
            self.__class__.COUNT += 1
            filename = make(string, self.COUNT)
        return filename

    def genName(self, command=None):
        """
        generate a random name; if command is given,
        then generate a unique but formatted name
        'slim.%prog.%cmd.%id.rsf'
        """
        if command is not None and hasattr(command, 'tag') and isinstance(
                command.tag, str):
            filename = self.genname_helper(command.tag)
        elif command is not None and isinstance(command, str):
            filename = self.genname_helper(command)
        else:
            #            raise Exception( "must spcify a valid command to generate an appropreate file" )
            td = self.get_tmp_dir()
            filename = tempnam(td) + self.suffix

        return filename

    def get_tmp_dir(self):
        if self.is_local:
            return self.env['slimvars']['localtmpdir']
        else:
            return self.env['slimvars']['globaltmpdir']

    def isempty(self):
        """
        check if this data has been built
        """
        local_data = self.get_data('localhost')
        if isfile(local_data):
            return False

        if self.data:
            if 'localhost' in self.nodenames:
                return not isfile(local_data)
            else:
                return not len(self.nodenames)

#    def getConverter( self , command ):
#        '''
#        return converter class
#
#        command must have attribute 'function'
#        '''
#        return self.sfFactory[command.tag]

    @classmethod
    def get_converter(self, command):

        if isinstance(command, str):
            tag = command
        else:
            tag = command.tag
        return self.sfFactory[tag]

    def __str__(self):
        """Adds the current lib's suffix to the end of filename
        note: if no lib is set then self.plugin.suffix returns ''
        """
        if self.data != None:
            return str(self.data)
        else:
            return "None"

    def __repr__(self):
        return str(self)

    def path(self):
        """
        returns the absolute pathname to the file
        """
        pathstr = abspath(str(self))
        pathstr = dirname(pathstr)
        return pathstr

    def getName(self):
        'returns the name of the data contained'
        return self.data

    def plot(self, command=None, plotcmd=None):
        """
        plot returns the path to the plotfile
        """

        if command is None:

            if plotcmd is None:
                plotcmd = self.SFGREY

            command = self.PLOTCMD.substitute(data=self.local_data,
                                              plotcmd=plotcmd,
                                              viewcmd=self.XTPEN)

        print >> self.env['record'](1, 'plot'), command
        system(command)

        return None

    def _get_local_data(self):
        return self.get_data("localhost")

    local_data = property(_get_local_data)

    def _get_any_node(self):
        if 'localhost' in self.nodenames or not self.nodenames:
            node = 'localhost'
        else:
            node = list(self.nodenames)[0]

        return node, self.get_data(node)

    def setName(self, newname, path=None):
        """wrapped by SLIMpy.serial_vector.setname"""
        newname = str(newname)

        if newname.endswith(self.suffix):
            newname = newname
        else:
            newname = newname + self.suffix

        if not path:
            newname = abspath(newname)
            path = dirname(newname)
            newname = basename(newname)

        if self.isfull():
            self.move(newname)

        self.updateData(newname)
        ninfo = self._node_info.setdefault('localhost', {})
        self.add_node_name('localhost')
        ninfo['location'] = path

        self.tmp(False)

    def updateData(self, newname):
        '''
        @param newname: string to rename data to
        @type newname: str
        '''
        self.data = newname

    def getHeader(self):
        """
        return open header, data must exist
        """

        node, data = self._get_any_node()
        if node == 'localhost':
            return open(data)

        else:
            cat = Unix_Pipes.CreateComand(['cat'], node_name=node, source=data)
            p0 = Popen(cat, shell=True, stdout=__PIPE__)

        return p0.stdout

    def move(self, newname):
        "move data file on disk"

        mv = self.MVCMD.substitute(mv=self.SFMV,
                                   From=self.local_data,
                                   To=abspath(newname))

        err = system(mv)
        if err is not 0:
            raise Exception("command %(mv)s failed " % vars())

    def rm(self):
        """
        removes the file on disc
        """

        print >> self.env['record'](
            10, 'cleaner'), "call to remove %s:" % self.data
        print >> self.env['record'](
            11, 'cleaner'), "\tistmp=%s , isfull=%s" % (self.istmp(),
                                                        self.isfull())

        if not self.istmp():
            return False
        if self.isempty():
            return False

        err = 0
        cmd_log1 = self.env['record'](1, 'cmd')
        #        print "rm called"

        synccmd = self.env['slimvars']['SYNC_CMD']
        do_sync = self.env['slimvars']['sync_disk_om_rm']

        def sync(synccmd):

            sync_command = Unix_Pipes.CreateComand([synccmd], node)
            print >> cmd_log1, sync_command
            p0 = Popen(sync_command, shell=True, stderr=__PIPE__)
            ret = p0.wait()
            if ret:
                print >> self.env[
                    'record'], 'error running %(sync_command)s ' % vars()
                print >> self.env[
                    'record'], 'try running SLIMpy with "sync_disk_om_rm=False" ' % vars(
                    )

        for node in self.nodenames.copy():

            data = self.get_data(node)
            sfrm = self.RMCMD.substitute(rm=self.SFRM, file=data)
            rm = self.RMCMD.substitute(rm='rm', file=data)
            if do_sync:
                sync(synccmd)

            command = Unix_Pipes.CreateComand([sfrm], node)
            print >> cmd_log1, command
            p0 = Popen(command, shell=True, stderr=__PIPE__)
            ret = p0.wait()
            print >> self.env['record'](2), "finished::", node, 'rm'
            if ret:
                err += 1
                msg = p0.stderr.read()
                p0.stderr.close()
                print >> self.env[
                    'record'], 'error %(ret)s on %(command)s: removing header file:\n%(msg)s' % vars(
                    )
                command = Unix_Pipes.CreateComand([rm], node)
                print >> cmd_log1, command
                p0 = Popen(command, shell=True, stderr=__PIPE__)
                ret = p0.wait()
                if ret:
                    msg = p0.stderr.read()
                    print >> cmd_log1, "could not 'sfrm' or rm 'data'\n%(msg)s" % vars(
                    )
                else:
                    self.nodenames.remove(node)
            else:
                self.nodenames.remove(node)

            p0.stderr.close()

        # return after all nodes have been processed
        return not err

    def node_copy(self, node_name):
        """
        copy from one node to another
        """
        if self.is_global or self.isempty():
            return
        if node_name in self.nodenames:
            return

        tmddir = self.env['slimvars']['localtmpdir']
        from_cmd = "%s form=xdr" % self.SFDD
        to_cmd = "%s form=native" % self.SFDD

        from_node = list(self.nodenames)[0]

        from_data = self.get_data(from_node)
        to_data = self.get_data(node_name)

        is_tmp = self.istmp()
        to_cmd = Unix_Pipes.CreateComand([to_cmd],
                                         node_name=node_name,
                                         target=to_data,
                                         is_local=[True],
                                         is_tmp=[is_tmp],
                                         ssh_localhost=True)
        frm_node = list(self.nodenames)[0]

        self.add_node_name(node_name)

        cmd = Unix_Pipes.CreateComand([from_cmd, to_cmd],
                                      is_local=[None, None],
                                      is_tmp=[is_tmp, is_tmp],
                                      node_name=frm_node,
                                      source=from_data,
                                      ssh_localhost=False)

        print >> self.env['record'](1, 'cmd'), cmd
        assert frm_node != node_name, "can not copy from same node to same node"
        p0 = Popen(cmd, shell=True, stderr=__PIPE__)
        p0.wait()
        print >> self.env['record'](2, 'cmd'), "finished:: cp", node_name
        if p0.returncode:
            out = p0.stderr.read()
            raise IOError("copying from node to node returned error %s\n\n%s" %
                          (p0.returncode, out))

    def available_from(self, nodename):
        if nodename in self.nodenames:
            return True
        elif 'localhost' in self.nodenames:
            return True
        return False

    def readattr(self):
        'returns rsf header info as a dict object'
        header = self.getHeader()
        lines = header.readlines()

        header.close()

        file_dict = {}
        for line in lines:
            if '=' in line:
                line = line[1:-1]
                line = line.split('=')
                try:
                    line[1] = eval(line[1])
                except:
                    pass
                file_dict[line[0]] = line[1]

        self.BinFile = file_dict.pop('in')
        #l = lambda x : (x=='native_int' and int) or (x=='native_float' and float) or (x=='native_double' and long) or (x=='native_complex' and complex)

        #file_dict['type'] = l(file_dict['data_format'])
        #del file_dict['data_format']

        return file_dict

    def readbin(self, start=0, len=-1):  #IGNORE:W0622
        '''
        @precondition: data must exist.
        @return: a numpy array.
        
        @param start: element in the data to start from.
        @type start: int

        @param len: length of data to read into the array;
                if -1, reads until the end of the data.
        @type len: int
        '''

        l = lambda x: ((x == 'native_int' and int32) or
                       (x == 'native_float' and float32) or
                       (x == 'native_double' and float64) or
                       (x == 'native_complex' and complex64))

        file_dict = self.readattr()
        typeflag = l(file_dict['data_format'])

        fd = open(self.BinFile, 'rb')
        fd.seek(start * typeflag().nbytes)

        return fromfile(fd, typeflag, len)

    def writebin(self, data, start=0, size=0):
        """
        write data to file 
        not tested
        """
        header = self.path()
        start = start * 4

        vector_dict, file_dict = self.readattr(header)
        if size == 0:
            size = product(vector_dict['dim'])
        binfile = file_dict['in']
        fd = open(binfile, 'r+')
        #a = array.array('f', __numarray.transpose(data[:size]).tolist()[0])
        fd.seek(start)
        data[:, :size].tofile(fd)
        fd.close()

    def book(self, header):
        'not tested'
        header = str(header)
        f = open(header)
        L = f.read()
        f.close()
        return L

    def writeattr(self, header, vector_dict, file_dict):
        'not tested'
        file_dict = dict(file_dict)
        if file_dict.has_key('in'):
            del file_dict['in']
        if file_dict.has_key('data_format'):
            del file_dict['data_format']
        for i in range(1, len(vector_dict['dim']) + 1):
            file_dict['n%s' % i] = vector_dict['dim'][i - 1]
        fd = self.readattr(header)
        for item in fd.items():
            try:
                if file_dict[item[0]] == item[1]:
                    del file_dict[item[0]]
            except:
                pass
        book = ''
        for item in file_dict.items():
            book += "\t%s=%s\n" % (item[0], item[1])
        fd = open(header, 'r+')
        fd.seek(0, 2)
        fd.write(book)
        fd.close()

    def write(self, header, data, book):
        'not tested'
        header = str(header)
        fd = open(header, 'w')
        fd.write(book)
        fd.close()
        attrs = self.readattr(header)
        fd = open(attrs['in'], 'w')
        fd.close()
        self.writebin(header, data)

    def get_data(self, nodename):

        data = self.data
        if self.is_global:
            loc = self.get_data_loc('localhost')
        else:
            loc = self.get_data_loc(nodename)

        return join(loc, data)

    def get_data_loc(self, nodename):

        if nodename == 'localhost':
            path = self.env['slimvars']['globaltmpdir']
        else:
            path = self.env['slimvars']['localtmpdir']

        node_info = self._node_info.setdefault(nodename, {})
        path = node_info.setdefault('location', path)

        return path

    def diagnostic(self, nodename=None):
        """
        run a check to see whether this data is valid
        """
        log = self.env['record'](1, 'cmd', 'err')
        log2 = self.env['record'](2, 'diagnostic')
        print >> log
        print >> log, "Running diagnostic on data %s" % self
        print >> log

        acmd = self.ATTRMD.substitute(attr=self.SFATTR,
                                      file=str(self),
                                      want='all',
                                      lval=2)

        if nodename is None:
            nodename = list(self.nodenames)
        elif isinstance(nodename, str):
            nodename = [nodename]

        did_run = False
        for node in nodename:
            did_run = True
            attr_command = Unix_Pipes.CreateComand([acmd], node)
            print >> log2, attr_command
            p0 = Popen(attr_command,
                       shell=True,
                       stderr=_STDOUT,
                       stdout=__PIPE__)
            ret = p0.wait()

            lines = p0.stdout.read()
            print >> log2, lines
            if ret:
                print >> log, 'Error running diagnostic on data "%(self)s"' % vars()
                return False
            else:
                print >> log, 'File "%(self)s" is OK.' % vars()

        return did_run
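
# --- Illustration (not part of the original example) -------------------------
# A minimal, standalone sketch of what readattr/readbin above do, assuming a
# plain RSF header made of key=value lines whose 'in' key points at the binary
# file.  The helper names (read_rsf_header, read_rsf_binary) are hypothetical,
# not SLIMpy API.
from numpy import int32, float32, float64, complex64, fromfile

_RSF_DTYPES = {'native_int': int32, 'native_float': float32,
               'native_double': float64, 'native_complex': complex64}

def read_rsf_header(path):
    "parse key=value pairs from an RSF header file into a dict"
    attrs = {}
    for line in open(path):
        if '=' in line:
            key, _, val = line.strip().partition('=')
            try:
                val = eval(val)      # numbers and quoted strings evaluate
            except Exception:
                pass                 # anything else stays as raw text
            attrs[key] = val
    return attrs

def read_rsf_binary(attrs, start=0, count=-1):
    "read the binary payload referenced by the header's 'in' key"
    dtype = _RSF_DTYPES[attrs['data_format']]
    fd = open(attrs['in'], 'rb')
    fd.seek(start * dtype().nbytes)
    return fromfile(fd, dtype, count)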
Beispiel #27
0
class sfConverter(Converter):
    """
    Base RSF converter class, used when a specific
    command cannot be found.
    @postcondition: transform returns voidspace
    """
    env = InstanceManager()
    #    slimvars = GlobalVars()

    @classmethod
    def place_adder(cls, command):
        command.adder = rsfAddCommands()
        return command

    @classmethod
    def map(cls, source, command):

        cmnd = cls.default_function(command)

        return cls.pack(cmnd)

    @classmethod
    def guess_exe_path(cls, name, error=0):

        if name.startswith('sf'):
            sfname = name
        else:
            sfname = "sf" + name
        slimvars = cls.env['slimvars']
        sfname = join(slimvars['RSFBIN'], sfname)

        if is_executable(name):
            return abspath(name)

        elif is_executable(sfname):
            return sfname

        elif WhereIs(name):
            return WhereIs(name)

        elif error:
            raise EnvironmentError("No executable '%(name)s' or '%(sfname)s'" %
                                   vars())
        else:
            return name

    @classmethod
    def default_function(cls, command, name=None):
        slimvars = cls.env['slimvars']
        if name is None:
            name = command.tag
        else:
            name = str(name)

        exe = cls.guess_exe_path(name, error=slimvars['check_path'])

        command.func = OutOfCoreDriver(exe)
        command.adder = rsfAddCommands()

        return command

    @classmethod
    def mpi_function(cls, command, name=None, num_proc='all'):
        slimvars = cls.env['slimvars']
        MPICOM = Template(slimvars['MPIFLAGS'])

        if name is None:
            name1 = command.tag
        else:
            name1 = name

        name2 = join(slimvars['RSFBIN'], 'xsf' + name1)

        if isfile(name1):
            if isabs(name1):
                command_name = name1
            else:
                command_name = join(".", name1)
        else:
            command_name = name2

        if slimvars['check_path'] and not isfile(command_name):
            raise EnvironmentError("No files '%(name1)s' or '%(name2)s'" %
                                   vars())


        # assert slimvars['mpi'], "called mpi map while slimpy mpi var is false"

        if "$NODEFILE" in slimvars['MPIFLAGS'] and slimvars['NODEFILE'] is None:
            raise Exception(
                "SLIMpy detected an MPI process that requires a nodefile: "
                "please set the 'NODEFILE' global variable")
        mpi_cmd = MPICOM.substitute(**slimvars.slimGlobals)

        cmd = "%(mpi_cmd)s %(command_name)s" % vars()
        command.func = OutOfCoreDriver(cmd)
        command.adder = rsfAddCommands()

        command.command_type = 'multi_process'
        command.num_proc = num_proc

        return command

    @classmethod
    def constr(cls, command, *Spaces):
        return True
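
# --- Illustration (not part of the original example) -------------------------
# A hedged sketch of the lookup order guess_exe_path implements above:
# 1) the literal name if it is an executable file, 2) $RSFBIN/sf<name>,
# 3) anything on the shell PATH, 4) the bare name, or an error when 'strict'.
# find_rsf_program is a hypothetical stand-in that only mirrors that chain
# with the standard library; it is not the SLIMpy implementation.
import os
from os.path import join, abspath, isfile

def find_rsf_program(name, rsf_bin, strict=False):
    "resolve an RSF program name roughly the way sfConverter does"
    sfname = name if name.startswith('sf') else 'sf' + name
    candidate = join(rsf_bin, sfname)

    if isfile(name) and os.access(name, os.X_OK):
        return abspath(name)
    if isfile(candidate) and os.access(candidate, os.X_OK):
        return candidate
    for directory in os.environ.get('PATH', '').split(os.pathsep):
        prog = join(directory, name)
        if isfile(prog) and os.access(prog, os.X_OK):
            return prog
    if strict:
        raise EnvironmentError("no executable '%s' or '%s'" % (name, candidate))
    return name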
Beispiel #28
0
class JobPosting(object):
    '''
    Posting class for worker threads to get info about new jobs.
    Works like a message board.
    '''
    env = InstanceManager()

    def __init__(self, name, processor):

        self.lock = RLock()
        self.event = Event()
        self.lock.acquire()

        self.name = name
        self.processor = processor
        self.finished_jobs = set()
        self.current = set()
        self.todo = set()
        self._is_waiting = False

        self._first_wait = True
        self.start_time = 0
        self.stop_time = 0
        self.current_rec = None
        self.finished_rec = set()

        self.lock.release()

    def _get_waiting(self):
        'true if worker is waiting for a job'
        return self._is_waiting

    def _set_waiting(self, val):
        self._is_waiting = val

    waiting = property(_get_waiting, _set_waiting)

    def get_time_since_start(self):
        'get the time from the start of the job'
        if self.current_rec is None:
            return 0
        else:
            return time.time() - self.current_rec.created

    time_passed = property(get_time_since_start)

    def notify(self):
        'notify the listening worker'
        self.event.set()

    def new_todo(self, job):
        'add a job to the message board'
        self.lock.acquire()
        self.todo.add(job)
        self.lock.release()

    def get(self):
        'get a job from the todo pile'
        self.lock.acquire()
        job = self.todo.pop()
        self.current.add(job)
        self.current_rec = JobRecord(job, self.name, self.processor)
        self.current_rec.start()

        self.env['record'].add_job(self.current_rec)

        self.lock.release()
        return job

    def finished(self, job):
        '''
        adds the current job to the finished pile
        '''
        self.lock.acquire()
        self.current.remove(job)
        self.finished_jobs.add(str(job))

        rec = self.current_rec
        rec.stop()
        self.finished_rec.add(rec)
        print >> self.env['record'](10, 'time'), \
            "Command execution time: %0.4f seconds" % (rec.finished - rec.created)
        self.current_rec = None

        self.lock.release()

    def is_working(self):
        'true if the worker is working on a job'
        return len(self.current)

    def has_todo(self):
        'returns the length of the todo pile'
        return len(self.todo)

    def busy(self):
        'true if job is working or has stuff todo'
        self.lock.acquire()
        ret = self.is_working() or self.has_todo()
        self.lock.release()
        return ret

    def post(self, job):
        'post a job to the todo'
        self.lock.acquire()

        self.todo.add(job)
        self.event.set()

        self.lock.release()

    def start_timer(self):
        'starts timer '
        self.start_time = time.time()

    def stop_timer(self):
        'stops timer '
        self.stop_time = time.time()

    def wait_for_job(self):
        "wait for self.notify to be called"
        self.event.wait()
        self.event.clear()
        return

    def acquire(self):
        self.lock.acquire()

    def release(self):
        self.lock.release()

    def total_time(self):
        "total time between start and stop"
        return self.stop_time - self.start_time

    def time_idol(self):
        'total time spent idle'
        tot = self.total_time()
        for jobrecord in self.finished_rec:
            tot -= jobrecord.total_time()
        return tot
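
# --- Illustration (not part of the original example) -------------------------
# A hedged sketch of how a worker thread might drive a JobPosting: a scheduler
# calls post(), the worker blocks in wait_for_job(), drains the todo pile with
# get()/finished(), and busy() tells the scheduler whether work remains.  The
# 'stop_event' and the callable jobs are hypothetical, and get()/finished()
# assume the SLIMpy environment ('record' in particular) is already set up.
def worker_loop(posting, stop_event):
    posting.start_timer()
    while not stop_event.is_set():
        posting.waiting = True
        posting.wait_for_job()         # blocks until notify()/post()
        posting.waiting = False
        while posting.has_todo():
            job = posting.get()        # moves the job into the 'current' set
            try:
                job()                  # run the job itself
            finally:
                posting.finished(job)  # records timing via JobRecord
    posting.stop_timer()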
Beispiel #29
0
class RemoteDriver(object):
    """
    used to chain ooc_driver commands together
    """
    env = InstanceManager()
    _stderr_logfile = "stderr_logfile.log"

    def _get_stderr_logfile(self):
        return join(self.env['slimvars']['localtmpdir'], self._stderr_logfile)

    stderr_logfile = property(_get_stderr_logfile)

    def __init__(self):
        self.__cmnd = []
        self.__Target = None
        self.__Source = None
        self._node_name = None
        return

    def get_num_proc(self):

        numbers = [com.get_nub_proc() for com in self.getCmnd()]

        return max(numbers)

    def getCmnd(self):
        return self.__cmnd

    def get_command_list(self):
        return self.__cmnd

    def addCommand(self, command):
        self.setTarget(None)
        return self.__cmnd.append(command)

    def is_muti_proc(self):
        for cmd in self.get_command_list():
            if cmd.command_type == 'multi_process':
                return True
        return False

    def setSource(self, src):
        assert not self.getCmnd(), \
            "driver already has commands attached to it, cannot set a source"
        self.__Source = src
        return

    def getSource(self):
        return self.__Source

    def setTarget(self, tgt):
        self.__Target = tgt
        return

    def getTarget(self):
        return self.__Target

    def get_node_name(self):
        return self._node_name

    def set_node_name(self, val):
        self._node_name = val
        for cmd in self.get_command_list():
            cmd.node_name = val

    source = property(getSource, setSource)
    target = property(getTarget, setTarget)
    node_name = property(get_node_name, set_node_name)

    def get_targets(self):
        tgts = set()
        if self.target:
            tgts.add(self.target)

        for com in self.__cmnd:
            tgts.update(com._get_target_cont())
        return tgts
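
    # make_locals_list and make_tmp_list below build one flag per command in
    # the pipe: whether that stage's targets live in node-local storage and
    # whether they are temporary files (these appear to feed the is_local /
    # is_tmp arguments of Unix_Pipes.CreateComand).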

    def make_locals_list(self):
        lst = []
        push = lst.append

        for com in self.get_command_list():
            push(None)
            for target in com._get_target_cont():
                if target.is_global:
                    lst[-1] = False
                    break
                else:
                    lst[-1] = True

        if lst[-1] is False:
            pass  # do nothing target is already global
        elif self.target:
            # set it to the location of the stdout target
            is_local = bool(self.node_name != 'localhost')
            lst[-1] = is_local

        return lst

    def make_tmp_list(self):

        lst = []
        push = lst.append

        for com in self.__cmnd:
            push(None)
            for target in com._get_target_cont():
                if target.istmp():
                    lst[-1] = True
                    break
                else:
                    lst[-1] = False

        if self.target:
            lst[-1] = self.target.istmp()

        return lst

    def get_sources(self):
        srcs = set()
        if self.source:
            srcs.add(self.source)
        for com in self.__cmnd:
            srcs.update(com._get_source_cont())
        return srcs

    def __str__(self):

        formatted = [
            com.func.format(self.node_name, com.params, com.kparams)
            for com in self.getCmnd()
        ]

        command = " | ".join(formatted)
        if self.source:
            command = "< %s %s" % (self.source, command)
        if self.target:
            command = "%s > %s" % (command, self.target)

        return command
#        return "< %s %s > %s" %( self.getSource(), command , self.getTarget() )

    def __call__(self, *params, **kparams):
        """
        runs the driver as a single pipe, using the list of commands
        """
        return self.run()

    def run(self):

        if self.node_name is None:
            raise Exception("job host not set")

        log = self.env['record'](5, 'cmd', 'err')
        #        print >> log
        #        print >> log, "command failed, running diagnostic"
        #        print >> log
        try:
            self._run()
        except IOError:

            print >> log, "command failed, running diagnostic"
            if self.env['slimvars']['run_again']:
                if self.diagnose():
                    print >> log, "All diagnostics passed: running command again"
                    self._run()
                else:
                    print >> log, "diagnostic failed: raising exception"
                    raise
            else:
                raise
        return

    def diagnose(self):

        for source in self.get_sources():
            if not source.diagnostic():

                return False

        return True

    def add_node_name_to_targets(self):

        if self.target:
            if self.target.is_local:
                self.target.add_node_name(self.node_name)
            else:
                self.target.add_node_name('localhost')

        for cmd in self.get_command_list():
            cmd.add_node_name_to_targets()

        return

    def copy_sources(self):
        """
        recursively copy source nodes if necessary
        """

        if self.source and self.source.is_local:
            if self.is_muti_proc():
                self.source.node_copy('localhost')
            else:
                self.source.node_copy(self.node_name)

        for cmd in self.get_command_list():
            cmd.copy_sources()

    def _run(self):
        center = self.env['center']

        self.copy_sources()

        center.acquire()
        cmd = self.pipe_format()

        log = self.env['record']
        print >> log(5, 'cmd'), cmd
        p3 = Popen(cmd, stderr=__PIPE__, shell=True)
        pid = p3.pid
        center.add_pid(pid)
        #        p3.tochild.close()
        center.release()

        try:
            err = p3.wait()
            print >> log(5), "finished::", self.node_name

        except OSError, e:
            # errno 10 (ECHILD on Linux): the child is already gone,
            # so fall back to whatever returncode Popen recorded
            if e.errno == 10:
                err = p3.returncode
            else:
                raise

        center.acquire()

        self.add_node_name_to_targets()

        center.remove_pid(pid)
        lines = p3.stderr.readlines()
        last_line = "".join(lines)  #@UnusedVariable

        if err:
            #            last_line = self.get_last_line_of_err_log()
            node = self.node_name  #@UnusedVariable
            center.release()
            raise IOError(
                err,
                "process %(pid)s on node %(node)s, returned %(err)s: \n%(last_line)s\nCommand:%(cmd)s"
                % vars())
        else:
            center.release()

            if last_line:
                print >> log(5, 'cmd', 'err'), last_line

            p3.stderr.close()
            return
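
# --- Illustration (not part of the original example) -------------------------
# A minimal sketch of the pipe composition that RemoteDriver.__str__ builds up:
# each command is rendered to a string, the strings are joined with '|', and
# the optional source/target become shell redirections.  build_pipe is a
# hypothetical helper working on plain strings, not on SLIMpy command objects.
def build_pipe(stages, source=None, target=None):
    "join already-formatted command strings into one shell pipeline"
    command = " | ".join(stages)
    if source is not None:
        command = "< %s %s" % (source, command)
    if target is not None:
        command = "%s > %s" % (command, target)
    return command

# e.g. build_pipe(["sfspike n1=100", "sfnoise var=1"], target="noisy.rsf")
# gives 'sfspike n1=100 | sfnoise var=1 > noisy.rsf'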
Beispiel #30
0
class PSpace(object):
    """
    PSpace is a simple way of tracking and passing metadata without
    being bound to each specific datatype.
    Its methods are similar to a Python dict's.
    """
    ## @var _space_dict
    # @brief dict of information about this space

    ## @var _plugin
    # @brief slimpy_base.api.Plugin represented by this space

    ## slimpy global variables
    env = InstanceManager()

    ## Constructor, create a space from another space or like a dict object
    #     @param plugin a string name or a subclass of DataContainer
    def __init__(self, plugin, *E, **D):
        """
        create a space from another space 
        """

        if isinstance(plugin, PSpace):
            self._space_dict = dict(plugin.params, *E, **D)
            self.plugin = plugin.plugin
        else:
            self._space_dict = dict(*E, **D)
            self.plugin = plugin

    ## Get the plugin represented by this space
    def _get_plugin(self):
        'Get the plug-in represented by this space'
        return self._plugin

    def _set_plugin(self, plugin):
        from slimpy_base.Core.Interface.ContainerBase import DataContainer
        plugin_type = type(plugin)
        assert plugin_type == str or issubclass(plugin, DataContainer)
        if isinstance(plugin, str):
            self._plugin = self.env['keystone'].getplugin(plugin)
        else:
            self._plugin = plugin

    ## get and set property for self._plugin
    plugin = property(_get_plugin, _set_plugin)

    ## Make a new container from the current parameters
    # @param command may be a string or a slimpy_base.Core.Command.Command.Command
    # @param tmp True if the container is a temporary file
    def makeContaner(self, command=None, tmp=None):
        """
        makeContaner(self, command=None, tmp=None) -> Container
        Make a new container from the current parameters
        
        """
        return self.plugin(parameters=self, command=command, tmp=tmp)

    ## create a new PSpace updated with new keys
    def newParameter(self, *E, **F):
        """
        Make a new parameter updated with the new keys, 'keys'
        """
        SpaceType = type(self)
        x = SpaceType(self.plugin, self, *E, **F)
        return x

    ## update this PSpace with another
    def update(self, *E, **F):
        """
        update this PSpace with another same as a python dict  
        """

        for e in E:

            if isinstance(e, PSpace):
                self._space_dict.update(e.params)
            elif e is None:
                pass
            else:
                self._space_dict.update(e)

        self._space_dict.update(**F)

        return

    ## number of elements the space represents
    def __len__(self):
        return self.size

    ## shallow copy
    def copy(self):
        """
        returns a shallow copy of self
        """
        SpaceType = type(self)
        return SpaceType(self.plugin, self._space_dict.copy())

    ## returns True if D has a key k, else False
    # @return bool
    def has_key(self, k):
        ' D.has_key(k) -> True if D has a key k, else False'
        return self._space_dict.has_key(k)

    ## shape of the underlying image
    def _get_shape(self):
        """
        Returns a list of the dimensions of the image of the underlying vector
        """
        shp = []

        N = 1
        while self.has_key("n%s" % N):
            val = self["n%s" % N]
            if val < 1:
                raise TypeError(
                    "shape parameter does not conform to SLIMpy standard:\n"
                    "Should be an int greater than 0")
            shp.append(val)
            N += 1

        if N == 1:
            return ()

        i = len(shp) - 1
        while i > 0 and shp[i] == 1:
            shp.pop(-1)
            i -= 1

        return shp

    ## setter for shape of the underlying image
    # @param shape is a sequence of integers
    def _set_shape(self, shape):
        """
        delete all n* keys and replace them with shape 
        """
        N = 1
        nN = "n%s" % N
        while self.has_key(nN):
            del self[nN]
            N += 1
            nN = "n%s" % N

        for N, val in enumerate(shape):
            nN = "n%s" % (N + 1)
            self[nN] = val

    ## returns size of field of scalars
    def get_size(self):

        mul = lambda x, y: x * y
        prod = lambda shape: reduce(mul, shape, 1)
        shape = self.shape

        for i in shape:
            if i == UnknownValue:
                return None
        if not shape:
            return 0
        else:
            return prod(shape)

    ## size property
    size = property(get_size)
    ## shape property
    shape = property(_get_shape, _set_shape)

    def _get_ndim(self):
        return len(self.shape)

    ndim = property(_get_ndim)

    ## list of keys
    def keys(self):
        return self._space_dict.keys()

    ## pop a key from the dict
    # @exception raises a KeyError if k is not a key in self and no default is given
    def pop(self, k, *default):
        return self._space_dict.pop(k, *default)

    def _get_params(self, keep_unknown=False):
        if keep_unknown:
            params = self._space_dict.copy()
        else:
            itemset = [(k, v) for (k, v) in self.iteritems()
                       if v is not UnknownValue]
            params = dict(itemset)

        return params

    params = property(_get_params)

    ## @see dict
    def iteritems(self):
        """
        iterates over key value pairs in self
        """
        return self._space_dict.iteritems()

    ## helper for intersection and union
    def _space_helper(self, other, error=False):
        set_self = set(self.keys())
        set_other = set(other.keys())

        intersect = set_self.intersection(set_other)
        diff_self = set_self.difference(set_other)
        diff_other = set_other.difference(set_self)

        new = {}
        for key in intersect:
            v1 = self[key]
            v2 = other[key]
            if not self.equal_keys(other, key):
                if error:
                    raise ValueError(
                        "item self[%(key)s] != other[%(key)s]; %(v1)s != %(v2)s"
                        % vars())
                else:
                    v1 = UnknownValue
                    v2 = UnknownValue

            if v1 == UnknownValue:
                new[key] = v2
            else:
                new[key] = v1

        SpaceType = type(self)
        inter_space = SpaceType(self.plugin, new)
        dspace_self = SpaceType(self.plugin,
                                [(key, self[key]) for key in diff_self])
        dspace_other = SpaceType(self.plugin,
                                 [(key, other[key]) for key in diff_other])

        return inter_space, dspace_self, dspace_other

    ## intersection of this space and another
    def intersection(self, other):
        """
        space.intersection( other ) -> Space
        
        returns a Space object that is the intersection of self and other;
        it contains the restrictions of each space.
        The shared restrictions must be equal in both:
        e.g. if self contains the restriction n1=5 and other n1=6,
        the intersection method will raise an error.
        """
        inter_space, dspace_self, dspace_other = self._space_helper(
            other, True)

        inter_space.update(dspace_self, dspace_other)

        return inter_space

    def _union(self, other):
        """
        space.union( other ) -> Space 
        
        returns the union of this space with another 
        """
        inter_space, dspace_self, dspace_other = self._space_helper(
            other, False)

        return inter_space

    ## union of this space with another
    def union(self, other, *others):
        """
        space.union( other, *others ) -> Space 
        
        returns the union of this space with another
        that is only the restrictions that are the same in both spaces
         
        """

        next = self._union(other)

        for oths in others:
            next = next._union(oths)

        return next

    def __eq__(self, other):

        if type(self) != type(other):
            return False

        test1 = self._space_dict == other._space_dict
        test2 = self.plugin == other.plugin

        return test1 and test2

    ## True if self and other both have key k
    # @param other PSpace object
    # @param accept_unknown if the value at key k for either self or other is an slimpy_base.Core.Interface.PSpace.UnknownValue
    #     then  equal_keys returns accept_unknown
    def equal_keys(self, other, k, accept_unknown=True):
        """
        space.equal_keys( space2, key, accept_unknown=True ) -> bool
        returns true if key is present and equal in both spaces.
        If one value is UnknownValue then will return accept_unknown.
        """
        if self.has_key(k) and other.has_key(k):
            v1 = self[k]
            v2 = other[k]
            if v1 is UnknownValue or v2 is UnknownValue:
                return accept_unknown
            else:
                return v1 == v2
        else:
            return False

    def itervalues(self):
        return self._space_dict.itervalues()

    def values(self):
        return self._space_dict.values()

    def iterkeys(self):
        return self._space_dict.iterkeys()

    def setdefault(self, k, D=None):
        return self._space_dict.setdefault(k, D)

    def has_unknown(self):
        return len([v for v in self.itervalues() if v == UnknownValue])

    ## true if self is a subspace of other.
    def is_subspace(self, other, accept_unknown=True):
        """
        space.is_subspace(other) <--> space in other
        returns true if self is a subspace of other.
         
        """
        assert isinstance(other, PSpace)

        if self.plugin != other.plugin:
            return False

        key_set = set(self.keys())
        key_set_other = set(other.keys())

        issubset = key_set_other.issubset(key_set)

        if not issubset:
            return False

        for key in key_set_other:
            if not self.equal_keys(other, key, accept_unknown=accept_unknown):
                return False

        return True

    ## true if self is a superspace of other.
    def is_superspace(self, other, accept_unknown=True):
        return not self.is_subspace(other, accept_unknown=accept_unknown)

    ## true if self is a contains k
    # @param k a PSpace a key in self or a vector
    def __contains__(self, k):
        """
        S.__contains__( k ) <-> k in S
        """
        if isinstance(k, PSpace):
            return k.is_subspace(self)
        elif hasattr(k, 'space') and isinstance(k.space, PSpace):
            return k.space.is_subspace(self)
        else:
            return self._space_dict.__contains__(k)

    def __getitem__(self, item):
        return self._space_dict[item]

    def __delitem__(self, item):
        self._space_dict.__delitem__(item)

    def get(self, k, default=None):
        return self._space_dict.get(k, default)

    def __setitem__(self, item, val):
        self._space_dict[item] = val

    def __repr__(self):
        name = self.__class__.__name__
        return "%s( %s, %s )" % (name, repr(self.plugin), repr(
            self._space_dict))

    def __str__(self):
        dtype = self.get('data_type', "unknown data type")
        size = self.size
        name = self.__class__.__name__
        return "<%(name)s %(dtype)s:%(size)s>" % vars()
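
# --- Illustration (not part of the original example) -------------------------
# A standalone sketch of the n1/n2/... bookkeeping behind PSpace.shape and
# PSpace.size above: the shape is read from consecutive 'n<i>' keys, trailing
# singleton dimensions are dropped, and the size is the product of what is
# left.  These helpers are approximations for illustration only, not the
# PSpace class itself.
def shape_from_keys(params):
    "collect n1, n2, ... into a shape list, dropping trailing 1s"
    shp = []
    N = 1
    while ('n%d' % N) in params:
        shp.append(params['n%d' % N])
        N += 1
    while len(shp) > 1 and shp[-1] == 1:
        shp.pop()
    return shp

def size_from_keys(params):
    "product of the dimensions, or 0 for an empty shape"
    shp = shape_from_keys(params)
    return reduce(lambda x, y: x * y, shp, 1) if shp else 0

# e.g. shape_from_keys({'n1': 100, 'n2': 50, 'n3': 1}) -> [100, 50]
#      size_from_keys({'n1': 100, 'n2': 50, 'n3': 1})  -> 5000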