Example #1
def _runCommand(_ctype, _baseDir, command, _phantomConfig):
    def _reraise(stderr):
        raise RunCommandError(''.join(stderr))
    stderr = []
    return commands.runProcess(commands.shell(command),
                               stderrf=stderr.append,
                               log=True).addErrback(lambda _ : _reraise(stderr))
Example #2
def rsyncTo(host, src, dst, files=None, options=None, user=None, log=False):
    """
    Transfers `files` in the local directory `src` to the directory `dst` on
    `host`.  If `files` is None then `src` is assumed to be the complete path
    to be copied; otherwise it is treated as a list of files under `src`.
    """
    cmd = ['rsync']
    if files:
        cmd.append('--files-from=-')
        initialText = '\n'.join(files)
    else:
        initialText = None

    if options:
        cmd.append(options)

    if user:
        host = user + '@' + host

    cmd.extend([src, host + ':' + dst])

    return commands.runProcess(commands.shell(str(' '.join(cmd))),
                               expected=[0, 23],
                               stderrf=python.log.err,
                               initialText=str(initialText),
                               log=log)
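# Usage sketch (not from the scraped source): driving rsyncTo from a Twisted
# inlineCallbacks coroutine.  The host, paths, file names, and rsync options
# below are placeholders; it assumes rsyncTo above is importable and that
# commands.runProcess returns a Deferred, as the examples here suggest.
from twisted.internet import defer, reactor

@defer.inlineCallbacks
def _pushLogs():
    # Fires once rsync exits with an expected code (0 or 23).
    yield rsyncTo('build-host.example.org',
                  '/var/log/app',
                  '/backup/app-logs',
                  files=['app.log', 'error.log'],
                  options='-av',
                  user='backup',
                  log=True)

_pushLogs().addErrback(lambda f: f.printTraceback()).addBoth(lambda _: reactor.stop())
reactor.run()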
Example #3
def rsyncFrom(host, src, dst, files=None, options=None, user=None, log=False):
    """
    Copies `files` from `host` in location `src` to the local `dst`.  If `files`
    is None then `src` is assumed to be the actual path to copy.
    """
    cmd = ['rsync']
    if files:
        cmd.append('--files-from=-')
        initialText = '\n'.join(files)
    else:
        initialText = None

    if options:
        cmd.append(options)

    if user:
        host = user + '@' + host

    cmd.extend([host + ':' + src, dst])

    return commands.runProcess(commands.shell(str(' '.join(cmd))),
                               expected=[0, 23],
                               stderrf=python.log.err,
                               initialText=str(initialText),
                               log=log)
Example #4
@defer.inlineCallbacks
def _compressFiles(tag, compressDir):
    compressedFile = os.path.join(compressDir, tag.tagName + '.tar.gz')

    yield commands.runProcess(['mkdir', '-p', compressDir], stderrf=log.err)
    
    baseDirFiles, nonBaseDirFiles = _partitionFiles(tag.files, tag.metadata['tag_base_dir'])

    cmd = ['tar',
           '-C', tag.metadata['tag_base_dir'],
           '-czhf', compressedFile,
           '--files-from=-']

    yield commands.runProcess(cmd,
                              stderrf=log.err,
                              initialText=str('\n'.join(baseDirFiles + nonBaseDirFiles)))
    defer.returnValue(compressedFile)
Example #5
def rsyncFrom(host, src, dst, files=None, options=None, user=None, log=False):
    """
    Copies `files` from `host` in location `src` to the local `dst`.  If `files`
    is None then `src` is assumed to be the actual path to copy.
    """
    cmd = ['rsync']
    if files:
        cmd.append('--files-from=-')
        initialText = '\n'.join(files)
    else:
        initialText = None
        
    if options:
        cmd.append(options)

    if user:
        host = user + '@' + host

    cmd.extend([host + ':' + src, dst])

    return commands.runProcess(commands.shell(str(' '.join(cmd))),
                               expected=[0, 23],
                               stderrf=python.log.err,
                               initialText=str(initialText),
                               log=log)    
Example #6
def rsyncTo(host, src, dst, files=None, options=None, user=None, log=False):
    """
    Transfers `files` in the local directory `src` to the directory `dst` on
    `host`.  If `files` is None then `src` is assumed to be the complete path
    to be copied; otherwise it is treated as a list of files under `src`.
    """
    cmd = ['rsync']
    if files:
        cmd.append('--files-from=-')
        initialText = '\n'.join(files)
    else:
        initialText = None
        
    if options:
        cmd.append(options)

    if user:
        host = user + '@' + host

    cmd.extend([src, host + ':' + dst])

    return commands.runProcess(commands.shell(str(' '.join(cmd))),
                               expected=[0, 23],
                               stderrf=python.log.err,
                               initialText=str(initialText),
                               log=log)
Example #7
def runWithCred(cred, cmd, stdoutf=logging.OUTSTREAM.write, stderrf=logging.ERRSTREAM.write, log=False):
    cmdPrefix = ''
    if hasattr(cred, 'ec2Path'):
        cmdPrefix = cred.ec2Path + '/'

    cmd = addCredInfo(cmd, cred)
    cmd[0] = cmdPrefix + cmd[0]
    return commands.runProcess(cmd, stdoutf=stdoutf, stderrf=stderrf, log=log, addEnv=cred.env)
Example #8
def setSlotsForQueue(queue, hostname, slots):
    return commands.runProcess(['qconf',
                                '-rattr',
                                'queue',
                                'slots',
                                '[%s=%d]' % (hostname, slots),
                                queue],
                               log=True)
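# Usage sketch (not from the scraped source): the call below amounts to
# running "qconf -rattr queue slots [exec-1.example.org=8] all.q" and returns
# a Deferred.  Queue name, hostname, and slot count are placeholders, and
# log is assumed to be twisted.python.log as in the other examples.
d = setSlotsForQueue('all.q', 'exec-1.example.org', 8)
d.addCallback(lambda _: log.msg('slots updated'))
d.addErrback(log.err)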
Example #9
@defer.inlineCallbacks
def run(state, pipeline):
    if not os.path.exists(state.conf('config.pipeline_configs')):
        os.mkdir(state.conf('config.pipeline_configs'))

    tmpConfigName = os.path.join(state.conf('config.pipeline_configs'), str(time.time()) + '.config')

    pipeline.config = config.configFromMap({'CONFIG_FILE': tmpConfigName},
                                           base=pipeline.config)
    
    fout = open(tmpConfigName, 'w')
    
    # We want to produce an ini like file with [section]'s
    sections = {}
    for k in pipeline.config.keys():
        sections.setdefault('.'.join(k.split('.')[:-1]), []).append(k)

    for s, ks in sections.iteritems():
        if s not in ['', 'env']:        
            fout.write('[' + s + ']\n')
            for k in ks:
                shortK = k.split('.')[-1]
                fout.write('%s=%s\n' % (shortK, str(pipeline.config(k))))

    fout.close()

    templateDir = os.path.join(state.machineconf('dirs.clovr_pipelines_template_dir'),
                               pipeline.protocol)
    templateConfig = os.path.join(templateDir, 'pipeline_tmpl.config')
    templateLayout = os.path.join(templateDir, 'pipeline.layout')

    tmpPipelineConfig = os.path.join(state.conf('config.pipeline_configs'), str(time.time()) + '.pipeline.config')
    fout = open(tmpPipelineConfig, 'w')
    for line in handleIncludes(open(templateConfig)):
        fout.write(config.replaceStr(line, pipeline.config) + '\n')

    fout.close()

    cmd = ['run_pipeline.pl',
           '--config=' + tmpPipelineConfig,
           '--templatelayout=' + templateLayout,
           '--taskname=' + pipeline.taskName]

    if pipeline.queue:
        cmd.append('--queue=' + pipeline.queue)

    
    stdout = StringIO.StringIO()
    stderr = StringIO.StringIO()
    
    yield commands.runProcess(cmd,
                              stdoutf=stdout.write,
                              stderrf=stderr.write)

    pipelineId = stdout.getvalue().strip()
    if not pipelineId:
        raise commands.ProgramRunError(cmd, stderr.getvalue())

    defer.returnValue(pipeline.update(pipelineId=pipelineId))
Example #10
@defer.inlineCallbacks
def _sharedFoldersEnabled(vmType):
    if vmType == 'vmware':
        stdout = StringIO.StringIO()
        yield commands.runProcess(['df'],
                                  expected=[0, 1],
                                  stdoutf=stdout.write)
        defer.returnValue('.host:/shared' in stdout.getvalue())
    else:
        defer.returnValue(False)
Example #11
@defer.inlineCallbacks
def _unzipFile(fname):
    stdout = StringIO.StringIO()
    yield commands.runProcess(commands.shell('mkdir -p %s && unzip -o -d %s %s' % (os.path.splitext(fname)[0],
                                                                                   os.path.splitext(fname)[0],
                                                                                   fname)),
                              stdoutf=stdout.write,
                              stderrf=log.err)
    log.msg(stdout.getvalue())
    defer.returnValue([str(i.strip().replace('extracting: ', '').replace('inflating: ', '')) for i in stdout.getvalue().split('\n') if ('extracting' in i or 'inflating' in i)])
Example #12
@defer.inlineCallbacks
def _compressFiles(tag, compressDir):
    compressedFile = os.path.join(compressDir, tag.tagName + '.tar.gz')

    yield commands.runProcess(['mkdir', '-p', compressDir], stderrf=log.err)

    baseDirFiles, nonBaseDirFiles = _partitionFiles(
        tag.files, tag.metadata['tag_base_dir'])

    cmd = [
        'tar', '-C', tag.metadata['tag_base_dir'], '-czhf', compressedFile,
        '--files-from=-'
    ]

    yield commands.runProcess(cmd,
                              stderrf=log.err,
                              initialText=str('\n'.join(baseDirFiles +
                                                        nonBaseDirFiles)))
    defer.returnValue(compressedFile)
Example #13
@defer.inlineCallbacks
def _getPatchList(patchUrl, currentMajorVersion, currentPatchVersion):
    yield commands.runProcess(['mkdir', '-p', '/tmp/patches'])

    nextPatchVersion = currentPatchVersion + 1
    nextPatch = '%s-p%d' % (currentMajorVersion, nextPatchVersion)

    try:
        yield commands.runProcess([
            'svn', 'export',
            '%s/%s' % (patchUrl, nextPatch),
            '/tmp/patches/%s' % nextPatch
        ],
                                  log=True)
        morePatches = yield _getPatchList(patchUrl, currentMajorVersion,
                                          nextPatchVersion)
        defer.returnValue([nextPatch] + morePatches)
    except commands.ProgramRunError:
        defer.returnValue([])
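# Usage sketch (not from the scraped source): _getPatchList probes patchUrl
# for consecutively numbered "<major>-p<N>" patches and stops at the first
# failed "svn export".  The URL and version strings below are placeholders.
@defer.inlineCallbacks
def _printPatches():
    patches = yield _getPatchList('http://repo.example.org/patches', 'clovr-1.0', 0)
    # e.g. ['clovr-1.0-p1', 'clovr-1.0-p2'] if those directories exist
    print patches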
Example #14
@defer.inlineCallbacks
def _untargzFile(fname):
    stdout = StringIO.StringIO()
    yield commands.runProcess(
        ['tar', '-C', os.path.dirname(fname), '-zxvf', fname],
        stdoutf=stdout.write,
        stderrf=log.err)
    defer.returnValue([
        str(os.path.join(os.path.dirname(fname), i.strip()))
        for i in stdout.getvalue().split('\n')
    ])
Example #15
@defer.inlineCallbacks
def _bunzip2File(fname):
    stdout = StringIO.StringIO()
    yield commands.runProcess(commands.shell('bzcat %s | tar -C %s -xv' %
                                             (fname, os.path.dirname(fname))),
                              stdoutf=stdout.write,
                              stderrf=log.err)
    defer.returnValue([
        str(os.path.join(os.path.dirname(fname), i.strip()))
        for i in stdout.getvalue().split('\n')
    ])
Example #16
    def _run(text, tasklet):
        stdout = StringIO.StringIO()
        stderr = StringIO.StringIO()

        p = commands.runProcess(parseCmd(tasklet),
                                initialText=text,
                                stdoutf=stdout.write,
                                stderrf=stderr.write)
        p.addCallback(lambda _ : tasks.updateTask(taskName,
                                                  lambda t : t.progress()))
        p.addCallback(lambda _ : stdout.getvalue())
        p.addErrback(lambda _ : _raise(tasklet, stderr.getvalue()))
        return p
Example #17
    def _run(text, tasklet):
        stdout = StringIO.StringIO()
        stderr = StringIO.StringIO()

        p = commands.runProcess(parseCmd(tasklet),
                                initialText=text,
                                stdoutf=stdout.write,
                                stderrf=stderr.write)
        p.addCallback(
            lambda _: tasks.updateTask(taskName, lambda t: t.progress()))
        p.addCallback(lambda _: stdout.getvalue())
        p.addErrback(lambda _: _raise(tasklet, stderr.getvalue()))
        return p
Example #18
def _replaceTemplateKeys(templateXml, componentConfig, instanceXml):
    return commands.runProcess(
        [
            "replace_template_keys",
            "--template_xml",
            templateXml,
            "--component_conf",
            componentConfig,
            "--output_xml",
            instanceXml,
        ],
        log=True,
    )
Example #19
@defer.inlineCallbacks
def _unzipFile(fname):
    stdout = StringIO.StringIO()
    yield commands.runProcess(commands.shell(
        'mkdir -p %s && unzip -o -d %s %s' %
        (os.path.splitext(fname)[0], os.path.splitext(fname)[0], fname)),
                              stdoutf=stdout.write,
                              stderrf=log.err)
    log.msg(stdout.getvalue())
    defer.returnValue([
        str(i.strip().replace('extracting: ', '').replace('inflating: ', ''))
        for i in stdout.getvalue().split('\n')
        if ('extracting' in i or 'inflating' in i)
    ])
Example #20
@defer.inlineCallbacks
def tagData(state, tagName, taskName, files, metadata, action, recursive, expand, compressDir, filterF=None, deleteOnExpand=False):
    if not os.path.exists(state.conf('tags.tags_directory')):
        yield commands.runProcess(['mkdir', '-p', state.conf('tags.tags_directory')])

    files = yield _generateFileList(files, recursive, expand, deleteOnExpand)

    if action == ACTION_APPEND:
        try:
            tag = yield state.tagPersist.loadTag(tagName)
            metadata = func.updateDict(tag.metadata, metadata)
            oldFiles = set(tag.files)
        except persist.TagNotFoundError:
            oldFiles = set()
    else:
        oldFiles = set()

    if 'tag_base_dir' not in metadata:
        metadata['tag_base_dir'] = '/'
    files = [f
             for f in files
             if f not in oldFiles and (not filterF or filterF and filterF(f))]

    files += oldFiles

    # Remove any dups
    files = list(set(files))
    
    tag = persist.Tag(tagName=tagName,
                      files=files,
                      metadata=metadata,
                      phantom=None,
                      taskName=taskName)
    

    if compressDir:
        compressedFile = yield _compressFiles(tag, compressDir)
        tag.metadata = func.updateDict(tag.metadata,
                                       {'compressed': True,
                                        'compressed_file': compressedFile})
    else:
        tag.metadata = func.updateDict(tag.metadata,
                                       {'compressed': False})

    yield state.tagPersist.saveTag(tag)

    # The tag we saved had phantom set to None, but this could be a
    # phantom tag, in which case we reload it from disk and cache that
    # in order to pick up any phantom information
    tag = yield state.tagPersist.loadTag(tag.tagName)
    
    defer.returnValue(tag)
Example #21
def _runWorkflow(instanceXml):
    stderr = StringIO.StringIO()

    cmd = ['RunWorkflow',
           '-i', instanceXml,
           '-m', '1',
           '--init-heap=100m',
           '--max-heap=1024m',
           '--logconf=/opt/workflow-sforge/log4j.properties',
           '--debug']
    
    def _raiseProgramError(_):
        raise commands.ProgramRunError(cmd, stderr.getvalue())
    
    return commands.runProcess(cmd,
                               stderrf=stderr.write,
                               log=True).addErrback(_raiseProgramError)
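# Usage sketch (not from the scraped source): callers get back a Deferred and
# can trap the ProgramRunError raised above when RunWorkflow exits non-zero.
# The instance XML path is a placeholder; log is assumed to be
# twisted.python.log as in the other examples.
def _logWorkflowFailure(failure):
    failure.trap(commands.ProgramRunError)
    log.err(failure)

d = _runWorkflow('/tmp/pipeline_instance.xml')
d.addCallbacks(lambda _: log.msg('workflow finished'), _logWorkflowFailure)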
Example #22
def _runWorkflow(instanceXml):
    stderr = StringIO.StringIO()

    cmd = [
        "RunWorkflow",
        "-i",
        instanceXml,
        "-m",
        "1",
        "--init-heap=100m",
        "--max-heap=1024m",
        "--logconf=/opt/workflow-sforge/log4j.properties",
        "--debug",
    ]

    def _raiseProgramError(_):
        raise commands.ProgramRunError(cmd, stderr.getvalue())

    return commands.runProcess(cmd, stderrf=stderr.write, log=True).addErrback(_raiseProgramError)
Example #23
def runProcessSSH(host, cmd, stdoutf, stderrf, sshUser=None, sshFlags=None, initialText=None, log=False, **kwargs):
    command = ['ssh']
    if sshUser:
        host = sshUser + '@' + host

    if sshFlags:
        command.append(sshFlags)

    command.append(host)

    command.append(core.quoteEscape(cmd))

    command = ' '.join(command)

    return commands.runProcess(commands.shell(str(command)),
                               stdoutf=stdoutf,
                               stderrf=stderrf,
                               initialText=str(initialText),
                               log=log,
                               **kwargs)
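# Usage sketch (not from the scraped source): running a command on a remote
# host over ssh and collecting its output.  Host, user, command, and flags
# are placeholders.  Note that runProcessSSH above always passes initialText
# through str(), so leaving it at None hands runProcess the literal string
# 'None' rather than no initial text.
stdout = StringIO.StringIO()
d = runProcessSSH('worker-1.example.org',
                  'uname -a',
                  stdoutf=stdout.write,
                  stderrf=log.err,
                  sshUser='clovr',
                  sshFlags='-o StrictHostKeyChecking=no',
                  log=True)
d.addCallback(lambda _: log.msg(stdout.getvalue()))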
Example #24
@defer.inlineCallbacks
def handleRealizePhantom(request):
    yield tasks_tx.updateTask(request.body['task_name'],
                              lambda t : t.setState(tasks_tx.task.TASK_RUNNING).update(numTasks=1))
    
    localClusters = yield www_clusters.listClusters('localhost',
                                                   {'cluster_name': 'local'},
                                                    request.body['user_name'])
    localCluster = localClusters[0]
    ctype = localCluster['config']['general.ctype']

    dstTagPath = os.path.join(localCluster['config']['dirs.upload_dir'],
                              request.body['tag_name'])

    metadata = func.updateDict(request.body['metadata'], {'tag_base_dir': dstTagPath})
    
    yield commands.runProcess(['mkdir', '-p', dstTagPath])

    try:
        yield _realizePhantom(ctype, dstTagPath, request.body['phantom'])
    except RunCommandError, err:
        yield tasks_tx.updateTask(request.body['task_name'],
                                  lambda t : t.addMessage(tasks_tx.task.MSG_ERROR, str(err)))
        raise err
Example #25
def _rmDir(d):
    return commands.runProcess(
        ['rmdir', d], stderrf=log.err).addErrback(lambda _: None)
Example #26
def _rmDir(d):
    return commands.runProcess(["rmdir", d], stderrf=log.err).addErrback(lambda _: None)
Example #27
def _replaceComponentConfigKeys(componentConfig, outComponentConfig):
    return commands.runProcess(
        ["replace_config_keys", "--template_conf", componentConfig, "--output_conf", outComponentConfig], log=True
    )
Example #28
def _replaceComponentConfigKeys(componentConfig, outComponentConfig):
    return commands.runProcess(['replace_config_keys',
                                '--template_conf', componentConfig,
                                '--output_conf', outComponentConfig],
                               log=True)
Example #29
def instantiateCredential(conf, cred):
    if not conf('config_loaded', default=False):
        conf = config.configFromConfig(conf,
                                       base=config.configFromStream(open(conf('conf_file')),
                                                                    base=conf))

    certFile = os.path.join(conf('general.secure_tmp'), cred.name + '_cert.pem')
    keyFile = os.path.join(conf('general.secure_tmp'), cred.name + '_key.pem')

    mainDeferred = defer.succeed(None)
    
    if not os.path.exists(certFile) and not os.path.exists(keyFile):
        tmpCertFile = os.path.join(conf('general.secure_tmp'), cred.name + '_cert-tmp.pem')
        tmpKeyFile = os.path.join(conf('general.secure_tmp'), cred.name + '_key-tmp.pem')
        if 'ec2_url' not in cred.metadata:
            return defer.fail(Exception('You must have an ec2_url'))
        parsedUrl = urlparse.urlparse(cred.metadata['ec2_url'])
        if ':' not in parsedUrl.netloc:
            return defer.fail(Exception('Your URL must contain a port'))
        host, port = parsedUrl.netloc.split(':')
        fout = open(tmpCertFile, 'w')
        fout.write(cred.cert)
        fout.close()
        fout = open(tmpKeyFile, 'w')
        fout.write(cred.pkey)
        fout.close()
        d = commands.runProcess(['nimbusCerts2EC2.py',
                                 '--in-cert=' + tmpCertFile,
                                 '--out-cert=' + certFile,
                                 '--in-key=' + tmpKeyFile,
                                 '--out-key=' + keyFile,
                                 '--java-cert-dir=/tmp',
                                 '--java-cert-host=' + host,
                                 '--java-cert-port=' + port],
                                stdoutf=None,
                                stderrf=None,
                                log=True)

        def _chmod(_exitCode):
            return commands.runProcess(['chmod', '+r', keyFile], stdoutf=None, stderrf=None)

        d.addCallback(_chmod)

        def _unlink(v):
            os.unlink(tmpCertFile)
            os.unlink(tmpKeyFile)
            return v

        d.addCallback(_unlink)
        d.addErrback(_unlink)

        mainDeferred.addCallback(lambda _ : d)
        
    ec2Home = cred.metadata.get('ec2_api_tools', '/opt/ec2-api-tools-1.3-57419')
    newCred = func.Record(name=cred.name, conf=conf, cert=certFile, pkey=keyFile, ec2Path=os.path.join(ec2Home, 'bin'),
                          env=dict(EC2_JVM_ARGS='-Djavax.net.ssl.trustStore=/tmp/jssecacerts',
                                   EC2_HOME=ec2Home,
                                   EC2_URL=cred.metadata['ec2_url']))

    if os.path.exists(conf('cluster.cluster_private_key') + '.pub'):
        pubKey = open(conf('cluster.cluster_private_key') + '.pub').read().rstrip()
        def _addKeypair():
            keyPairDefer = ec2.addKeypair(newCred, conf('cluster.key') + '||' + pubKey)
            def _printError(f):
                log.msg('Adding keypair failed, retrying')
                log.err(f)
                return f
            keyPairDefer.addErrback(_printError)
            return keyPairDefer
        mainDeferred.addCallback(lambda _ : defer_utils.tryUntil(10, _addKeypair, onFailure=defer_utils.sleep(30)))
        
    mainDeferred.addCallback(lambda _ : newCred)
    return mainDeferred
Example #30
def instantiateCredential(conf, cred):
    if not conf('config_loaded', default=False):
        conf = config.configFromConfig(conf,
                                       base=config.configFromStream(open(
                                           conf('conf_file')),
                                                                    base=conf))

    certFile = os.path.join(conf('general.secure_tmp'),
                            cred.name + '_cert.pem')
    keyFile = os.path.join(conf('general.secure_tmp'), cred.name + '_key.pem')

    mainDeferred = defer.succeed(None)

    if not os.path.exists(certFile) and not os.path.exists(keyFile):
        tmpCertFile = os.path.join(conf('general.secure_tmp'),
                                   cred.name + '_cert-tmp.pem')
        tmpKeyFile = os.path.join(conf('general.secure_tmp'),
                                  cred.name + '_key-tmp.pem')
        if 'ec2_url' not in cred.metadata:
            return defer.fail(Exception('You must have an ec2_url'))
        parsedUrl = urlparse.urlparse(cred.metadata['ec2_url'])
        if ':' not in parsedUrl.netloc:
            return defer.fail(Exception('Your URL must contain a port'))
        host, port = parsedUrl.netloc.split(':')
        fout = open(tmpCertFile, 'w')
        fout.write(cred.cert)
        fout.close()
        fout = open(tmpKeyFile, 'w')
        fout.write(cred.pkey)
        fout.close()
        d = commands.runProcess([
            'nimbusCerts2EC2.py', '--in-cert=' + tmpCertFile,
            '--out-cert=' + certFile, '--in-key=' + tmpKeyFile,
            '--out-key=' + keyFile, '--java-cert-dir=/tmp',
            '--java-cert-host=' + host, '--java-cert-port=' + port
        ],
                                stdoutf=None,
                                stderrf=None,
                                log=True)

        def _chmod(_exitCode):
            return commands.runProcess(['chmod', '+r', keyFile],
                                       stdoutf=None,
                                       stderrf=None)

        d.addCallback(_chmod)

        def _unlink(v):
            os.unlink(tmpCertFile)
            os.unlink(tmpKeyFile)
            return v

        d.addCallback(_unlink)
        d.addErrback(_unlink)

        mainDeferred.addCallback(lambda _: d)

    ec2Home = cred.metadata.get('ec2_api_tools',
                                '/opt/ec2-api-tools-1.3-57419')
    newCred = func.Record(
        name=cred.name,
        conf=conf,
        cert=certFile,
        pkey=keyFile,
        ec2Path=os.path.join(ec2Home, 'bin'),
        env=dict(EC2_JVM_ARGS='-Djavax.net.ssl.trustStore=/tmp/jssecacerts',
                 EC2_HOME=ec2Home,
                 EC2_URL=cred.metadata['ec2_url']))

    if os.path.exists(conf('cluster.cluster_private_key') + '.pub'):
        pubKey = open(conf('cluster.cluster_private_key') +
                      '.pub').read().rstrip()

        def _addKeypair():
            keyPairDefer = ec2.addKeypair(newCred,
                                          conf('cluster.key') + '||' + pubKey)

            def _printError(f):
                log.msg('Adding keypair failed, retrying')
                log.err(f)
                return f

            keyPairDefer.addErrback(_printError)
            return keyPairDefer

        mainDeferred.addCallback(lambda _: defer_utils.tryUntil(
            10, _addKeypair, onFailure=defer_utils.sleep(30)))

    mainDeferred.addCallback(lambda _: newCred)
    return mainDeferred
Example #31
@defer.inlineCallbacks
def _ungzFile(fname):
    yield commands.runProcess(commands.shell('gzip -dc %s > %s' %
                                             (fname, fname[:-3])),
                              stderrf=log.err)
    defer.returnValue(str(fname[:-3]))
Example #32
import sys

from twisted.internet import reactor

from igs_tx.utils import commands

d = commands.runProcess(['date'], stdoutf=None, stderrf=None, expected=[0])

d.addCallback(lambda _: sys.stdout.write('FINISHED!\n')).addErrback(
    lambda r: sys.stdout.write('errrr??? %d\n' % r.exitCode)).addCallback(
        lambda _: reactor.stop())

reactor.run()
Example #33
import sys

from twisted.internet import reactor

from igs_tx.utils import commands

d = commands.runProcess(['date'], stdoutf=None, stderrf=None, expected=[0])

d.addCallback(lambda _ : sys.stdout.write('FINISHED!\n')).addErrback(lambda r : sys.stdout.write('errrr??? %d\n' % r.exitCode)).addCallback(lambda _ : reactor.stop())

reactor.run()
Example #34
@defer.inlineCallbacks
def tagData(state,
            tagName,
            taskName,
            files,
            metadata,
            action,
            recursive,
            expand,
            compressDir,
            filterF=None,
            deleteOnExpand=False):
    if not os.path.exists(state.conf('tags.tags_directory')):
        yield commands.runProcess(
            ['mkdir', '-p', state.conf('tags.tags_directory')])

    files = yield _generateFileList(files, recursive, expand, deleteOnExpand)

    if action == ACTION_APPEND:
        try:
            tag = yield state.tagPersist.loadTag(tagName)
            metadata = func.updateDict(tag.metadata, metadata)
            oldFiles = set(tag.files)
        except persist.TagNotFoundError:
            oldFiles = set()
    else:
        oldFiles = set()

    if 'tag_base_dir' not in metadata:
        metadata['tag_base_dir'] = '/'
    files = [
        f for f in files
        if f not in oldFiles and (not filterF or filterF and filterF(f))
    ]

    files += oldFiles

    # Remove any dups
    files = list(set(files))

    tag = persist.Tag(tagName=tagName,
                      files=files,
                      metadata=metadata,
                      phantom=None,
                      taskName=taskName)

    if compressDir:
        compressedFile = yield _compressFiles(tag, compressDir)
        tag.metadata = func.updateDict(tag.metadata, {
            'compressed': True,
            'compressed_file': compressedFile
        })
    else:
        tag.metadata = func.updateDict(tag.metadata, {'compressed': False})

    yield state.tagPersist.saveTag(tag)

    # The tag we saved had phantom set to None, but this could be a
    # phantom tag, in which case we reload it from disk and cache that
    # in order to pick up any phantom information
    tag = yield state.tagPersist.loadTag(tag.tagName)

    defer.returnValue(tag)
Example #35
@defer.inlineCallbacks
def _ungzFile(fname):
    yield commands.runProcess(commands.shell('gzip -dc %s > %s' % (fname, fname[:-3])),
                              stderrf=log.err)
    defer.returnValue(str(fname[:-3]))
Example #36
def _chmod(_exitCode):
    return commands.runProcess(['chmod', '+r', keyFile],
                               stdoutf=None,
                               stderrf=None)
Example #37
@defer.inlineCallbacks
def _bunzip2File(fname):
    stdout = StringIO.StringIO()
    yield commands.runProcess(commands.shell('bzcat %s | tar -C %s -xv' % (fname, os.path.dirname(fname))),
                              stdoutf=stdout.write,
                              stderrf=log.err)
    defer.returnValue([str(os.path.join(os.path.dirname(fname), i.strip())) for i in stdout.getvalue().split('\n')])
Example #38
def setSlotsForQueue(queue, hostname, slots):
    return commands.runProcess([
        'qconf', '-rattr', 'queue', 'slots',
        '[%s=%d]' % (hostname, slots), queue
    ],
                               log=True)
Example #39
@defer.inlineCallbacks
def _untargzFile(fname):
    stdout = StringIO.StringIO()
    yield commands.runProcess(['tar', '-C', os.path.dirname(fname), '-zxvf', fname],
                              stdoutf=stdout.write,
                              stderrf=log.err)
    defer.returnValue([str(os.path.join(os.path.dirname(fname), i.strip())) for i in stdout.getvalue().split('\n')])
Example #40
def _replaceTemplateKeys(templateXml, componentConfig, instanceXml):
    return commands.runProcess(['replace_template_keys',
                                '--template_xml', templateXml,
                                '--component_conf', componentConfig,
                                '--output_xml', instanceXml],
                               log=True)
Example #41
def _chmod(_exitCode):
    return commands.runProcess(['chmod', '+r', keyFile], stdoutf=None, stderrf=None)