# Assumes a Ganga environment: stripProxy and isType come from
# GangaCore.GPIDev.Base.Proxy; RegistrySlice, RegistrySliceProxy and
# GangaList come from Ganga's registry and list modules.
import time


def custom_export(item=None):
    """Serialise a Ganga object, or a collection of Ganga objects, to a
    string in Ganga's copyable text format instead of writing to a file."""

    class StringWrapper:
        """Minimal file-like object that accumulates everything written to it."""

        def __init__(self, string=""):
            self.string = string

        def writelines(self, lines):
            for line in lines:
                self.string += line

        def write(self, thing):
            self.string += thing

        def close(self):
            pass

    outFile = StringWrapper()

    item = stripProxy(item)

    # Lists, tuples, registry slices and GangaLists are exported element by
    # element; anything else is treated as a single object.
    if isinstance(item, (list, tuple)) \
            or isType(item, RegistrySliceProxy) \
            or isType(item, RegistrySlice) \
            or isType(item, GangaList):
        objectList = [stripProxy(element) for element in item]
    else:
        objectList = [item]

    lineList = [
        "#Ganga# File created by Ganga - %s\n" % (time.strftime("%c")),
        "#Ganga#\n",
        "#Ganga# Object properties may be freely edited before reloading into Ganga\n",
        "#Ganga#\n",
        "#Ganga# Lines beginning #Ganga# are used to divide object definitions,\n",
        "#Ganga# and must not be deleted\n",
        "\n"
    ]
    outFile.writelines(lineList)

    nObject = 0
    for this_object in objectList:
        name = this_object._name
        category = this_object._category
        outFile.write("#Ganga# %s object (category: %s)\n" % (name, category))
        this_object.printTree(outFile, "copyable")
        nObject += 1

    outFile.close()

    return outFile.string
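
A minimal usage sketch, assuming an interactive Ganga session in which `jobs` is the job registry and at least one job exists; everything beyond the call itself is illustrative:

# Hypothetical usage inside a Ganga session:
text = custom_export(jobs(0))     # export a single job to a string
print(text.splitlines()[0])       # "#Ganga# File created by Ganga - ..."
all_jobs = custom_export(jobs)    # a registry slice exports every job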
Example #2
def store_job(jid, bind_id, JOBS=None):
    """Serialise job `jid` to Ganga's text format and store it in a
    PostgreSQL table under the key `bind_id`."""
    user, password, db, host, port = 'postgres', 'ganga', 'jobs', 'localhost', 5432
    url = 'postgresql://{}:{}@{}:{}/{}'
    url = url.format(user, password, host, port, db)

    try:
        con = sqlalchemy.create_engine(url, client_encoding='utf8', executemany_mode='batch')
        # SQLAlchemy 1.3-style: bind the engine and reflect the existing tables.
        meta = sqlalchemy.MetaData(bind=con, reflect=True)
    except Exception as e:
        # Fall back to the default `template1` database if `jobs` does not exist.
        if "does not exist" in str(e):
            url = 'postgresql://{}:{}@{}:{}/template1'
            url = url.format(user, password, host, port)
            con = sqlalchemy.create_engine(url, client_encoding='utf8')
            meta = sqlalchemy.MetaData(bind=con, reflect=True)
        else:
            raise

    if JOBS is None:
        JOBS = create_table(con, meta)

    # `jobs` is the Ganga job registry available in the session.
    job = stripProxy(jobs[jid])

    # to_file() writes the job's text representation to stdout here, so
    # capture stdout in a StringIO buffer and always restore it afterwards.
    stdout = sys.stdout
    sys.stdout = io.StringIO()
    try:
        to_file(job)
        output = sys.stdout.getvalue()
    finally:
        sys.stdout = stdout

    con.execute(JOBS.insert(), {"jid": bind_id, "jstring": output})
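
A usage sketch, assuming a local PostgreSQL server reachable with the hard-coded credentials above and a populated Ganga session; `create_table` is defined elsewhere in the same module:

# Hypothetical usage: store job 0 under key 0 (the table is created on first use).
store_job(0, bind_id=0)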
Example #3
    def _splitter(self, job, inputdata):
        indata = job.inputdata
        if not indata:
            # No inputdata on the job: try to recover the pickled inputdata
            # stored with the prepared application in the shared area.
            share_path = os.path.join(expandfilename(getConfig('Configuration')['gangadir']),
                                      'shared',
                                      getConfig('Configuration')['user'],
                                      job.application.is_prepared.name,
                                      'inputdata',
                                      'options_data.pkl')
            if os.path.exists(share_path):
                with open(share_path, 'rb') as f:
                    indata = pickle.load(f)
            else:
                logger.error('Cannot split if no inputdata given!')
                raise SplittingError('job.inputdata is None and no inputdata found in optsfile')

        self.depth = indata.depth
        self.persistency = indata.persistency
        self.XMLCatalogueSlice = indata.XMLCatalogueSlice

        if stripProxy(job.backend).__module__.find('Dirac') > 0:
            # Cap at 100 files per subjob on DIRAC backends (see above warning).
            if self.filesPerJob > 100:
                self.filesPerJob = 100
            return DiracSplitter(indata,
                                 self.filesPerJob,
                                 self.maxFiles,
                                 self.ignoremissing)
        else:
            return super(SplitByFilesAndRun, self)._splitter(job, indata)
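
For context, a hedged sketch of how this splitter is typically attached to a job; DaVinci, LHCbDataset and SplitByFilesAndRun are GangaLHCb names, and the LFNs and values are made up:

# Illustrative only; the application, files and settings are hypothetical.
j = Job(application=DaVinci())
j.inputdata = LHCbDataset(['LFN:/lhcb/user/a/file1.dst',
                           'LFN:/lhcb/user/a/file2.dst'])
j.splitter = SplitByFilesAndRun(filesPerJob=1)
j.submit()    # _splitter() is invoked during submission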
Example #4
    def _create_subjob(self, job, dataset):
        # Promote plain string entries to LogicalFile objects.
        if any(isinstance(i, str) for i in dataset):
            dataset = [LogicalFile(file) for file in dataset]
        j = Job()
        j.copyFrom(stripProxy(job))
        j.splitter = None
        j.merger = None
        j.inputsandbox = []  # the master input sandbox is added automatically
        j.inputdata = LHCbDataset(files=dataset[:],
                                  persistency=self.persistency,
                                  depth=self.depth)
        j.inputdata.XMLCatalogueSlice = self.XMLCatalogueSlice

        return j
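
For orientation, a sketch of how a parent splitter might drive this helper, written as if inside the splitter class; the loop below is hypothetical, not the actual base-class code:

# Hypothetical driver loop (illustrative only):
subjobs = []
for subdataset in self._splitter(job, job.inputdata):
    subjobs.append(self._create_subjob(job, subdataset))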
Example #5
def store_job(jid, bind_id, db=None):
    """Serialise job `jid` to Ganga's text format and store it in the
    `jobs` collection of a MongoDB database under the key `bind_id`."""

    if db is None:
        client = MongoClient('localhost', 27017)
        db = client.ganga_xml

    # `jobs` is the Ganga job registry available in the session.
    job = stripProxy(jobs[jid])

    # to_file() writes the job's text representation to stdout here, so
    # capture stdout in a StringIO buffer and always restore it afterwards.
    stdout = sys.stdout
    sys.stdout = io.StringIO()
    try:
        to_file(job)
        output = sys.stdout.getvalue()
    finally:
        sys.stdout = stdout

    # Collection.insert() was removed in PyMongo; insert_one() is the supported call.
    db.jobs.insert_one({"jid": bind_id, "string": output})
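
A usage sketch, assuming a MongoDB server on localhost:27017 and a populated Ganga session; the stored document can be read back with standard PyMongo calls:

# Hypothetical usage:
store_job(0, bind_id=0)
client = MongoClient('localhost', 27017)
doc = client.ganga_xml.jobs.find_one({"jid": 0})
print(doc["string"][:80])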
Example #6
def check_job_until_completed(j):
    from GangaCore.GPIDev.Base.Proxy import stripProxy
    j = stripProxy(j)
    # Always watch the master job, not an individual subjob.
    if j.master is not None:
        j = j.master

    from time import sleep
    from GangaCore.Core import monitoring_component
    from GangaCore.Core.GangaRepository import getRegistryProxy

    jobs = getRegistryProxy('jobs')

    timeout = 60
    sleep_period = 1
    current_status = None
    state = 'completed'
    break_states = None
    verbose = True
    while j.status != state and timeout > 0:
        if not monitoring_component.isEnabled():
            # Monitoring is off: run one pass by hand over the slice
            # containing just this job.
            monitoring_component.runMonitoring(jobs=jobs.select(j.id, j.id))
        else:
            # Monitoring is on: nudge the component so the next cycle
            # happens as soon as possible.
            monitoring_component.alive = True
            monitoring_component.enabled = True
            monitoring_component.steps = -1
            monitoring_component.__updateTimeStamp = 0
            monitoring_component.__sleepCounter = -0.5
        if verbose and j.status != current_status:
            print("Job %s: status = %s" % (str(j.id), str(j.status)))
        current_status = j.status
        if isinstance(break_states, list) and j.status in break_states:
            print("Job finished with status: %s" % j.status)
            break
        sleep(sleep_period)
        timeout -= sleep_period
    # True only if the job reached the target state before the timeout.
    return j.status == state
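
A usage sketch, assuming a Ganga test session in which the monitoring component is importable; the job is illustrative:

# Hypothetical usage in a test:
j = Job()    # default local backend
j.submit()
assert check_job_until_completed(j)    # polls for up to 60 seconds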