Example #1
def upload(all, a, file, f):
    if not os.path.exists(cfg):
        print(__msg['no_file'])
        return

    remote, plex, server, user, password = config.get_config()
    # NOTE: the following password check and single-file branch were masked in
    # the source ("******"); this is a plausible reconstruction, not the
    # original code.
    check = input('Please verify your password: ')
    if check == password and (file or f):
        config.test(os.path.abspath(file or f),
                    '{}@{}:{}'.format(user, server, remote))
    else:
        if os.path.exists(os.path.abspath(plex)):
            for file in os.listdir(plex):
                config.test(
                    os.path.abspath(plex + '/' + file) or f,
                    '{}@{}:{}'.format(user, server, remote))
Example #2
def main():
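    # One-shot run: build the Dropbox handler from the test configuration,
    # process every file once, then shut down.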

    from config import test

    db = Dropbox(test())
    db.processAllFiles()
    db.shutdown()
Example #3
def main():

    from config import test

    db = Dropbox( test() )
    db.processAllFiles()
    db.shutdown()
Example #4
def test():
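    # getFileList() is expected to return a (success, file_count) pair; the
    # download only runs when the listing succeeded and is non-empty.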
    print "* File test starts"
    cfg = config.test()
    d = FileDownloader(cfg)
    stat, nFiles = d.getFileList()
    if not stat:
        return -1
    if nFiles > 0:
        d.downloadAll()
Example #5
def test():
    print "* File test starts"
    cfg=config.test()
    d = FileDownloader( cfg )
    stat, nFiles = d.getFileList()
    if not stat:
        return -1
    if nFiles > 0:
        d.downloadAll()
Example #6
def test() :
    from config import test

    url = 'https://user.web.cern.ch/user/Welcome.asp'

    c = Curler(test())
    print " url = ", url, ' returned ', len( c.get(url) ), ' bytes.'

    url = 'https://cms-conddb-dev.cern.ch/getLumi/'

    print " url = ", url, ' returned ', len( c.get( url ) ), ' items in the JSON list.'
Example #7
def test():
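    # Smoke test: fetch two URLs through the Curler wrapper and print how much
    # each call returned.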
    from config import test

    url = 'https://user.web.cern.ch/user/Welcome.asp'

    c = Curler(test())
    print " url = ", url, ' returned ', len(c.get(url)), ' bytes.'

    url = 'https://cms-conddb-dev.cern.ch/getLumi/'

    print " url = ", url, ' returned ', len(
        c.get(url)), ' items in the JSON list.'
Example #8
def main():
    '''Runs the dropBox forever.
    '''

    logging.info('Starting...')

    fqdn = socket.getfqdn()

    if fqdn == 'srv-C2C05-11.cms':
        logging.info('Using tier0 configuration.')
        dropBoxConfig = config.tier0()
    elif fqdn == 'srv-C2C05-15.cms':
        logging.info('Using online configuration.')
        dropBoxConfig = config.online()
    elif fqdn == 'vocms226.cern.ch':
        logging.info('Using offline configuration.')
        dropBoxConfig = config.offline()
    elif fqdn == 'vocms225.cern.ch':
        logging.warning('Using offline configuration (this is the hot-spare! -- should not be started unless something happened with vocms226).')
        dropBoxConfig = config.offline()
    elif fqdn.endswith('.cern.ch'):
        logging.info('Using test configuration.')
        dropBoxConfig = config.test()
    else:
        raise Exception('Not running at CERN.')

    logging.info('Configuring object...')
    dropBox = Dropbox.Dropbox(dropBoxConfig)

    logging.info('Configuring TERM handler...')
    signal.signal(signal.SIGTERM, handleTERM)

    logging.info('Running loop...')
    while not stop:
        logging.info('Processing all files...')
        dropBox.processAllFiles()

        # Avoid the delay if we just finished processing
        if stop:
            break

        if dropBoxConfig.delay:
            logging.info('Processing all files done; waiting %s seconds for the next run.', dropBoxConfig.delay)
            time.sleep( dropBoxConfig.delay )
        else:  # if delay is not set, it means we're Tier-0 and need to run at next 10 min interval:
            sleepTime = secUntilNext10Min()
            logging.info('Processing all files done; waiting %s seconds for the next run.', sleepTime)
            time.sleep( sleepTime )

    logging.info('Stopping...')
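
The secUntilNext10Min() helper used above is not shown in this excerpt; a minimal sketch, assuming it simply returns the number of seconds left until the next 10-minute boundary of the clock:

import time

def secUntilNext10Min():
    # Hypothetical reconstruction: seconds remaining until the next
    # 10-minute boundary (xx:00, xx:10, xx:20, ...), computed from epoch time.
    interval = 10 * 60
    return int(interval - (time.time() % interval))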
Example #9
def test():
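    # Exercise the StatusUpdater calls one by one with a dummy 40-character
    # hash; a failing run-status update is only reported, not fatal.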
    from config import test

    someHash = '42' * 20

    print ' == '
    print ' -- '
    su = StatusUpdater(test())

    print ' -- '
    su.updateFileStatus(someHash, 2000)

    print ' -- '
    su.uploadFileLog(someHash, "dummy log string")

    print ' -- '
    try:
        su.updateRunStatus(1000)
    except Exception, e:
        print "ERROR updating run status: " + str(e)
Example #10
def test() :
    from config import test

    someHash = '42'*20

    print ' == '
    print ' -- '
    su = StatusUpdater( test() )

    print ' -- '
    su.updateFileStatus(someHash, 2000 )

    print ' -- '
    su.uploadFileLog(someHash, "dummy log string")

    print ' -- '
    try:
        su.updateRunStatus( 1000 )
    except Exception, e:
        print "ERROR updating run status: " + str(e)
Example #11
    su.updateFileStatus(someHash, 2000)

    print ' -- '
    su.uploadFileLog(someHash, "dummy log string")

    print ' -- '
    try:
        su.updateRunStatus(1000)
    except Exception, e:
        print "ERROR updating run status: " + str(e)

    print ' -- '
    try:
        su.uploadRunLog("dummy download log string", 'dummy global log string')
    except Exception, e:
        print "ERROR uploading run logs: " + str(e)

    print ' -- '
    try:
        su.updateRunRunInfo(1234, 4321)
    except Exception, e:
        print "ERROR uploading run run info: " + str(e)

    print ' -- '
    del su
    print ' == '


if __name__ == '__main__':
    test()
Example #12
                    'username': service.secrets['onlineUser']['user'],
                    'password': service.secrets['onlineUser']['password'],
                })
                result = self.curl.query(url, data)
            else:
                # In other cases, raise as usual
                raise

        try :
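            # The body is not always JSON: a "No JSON object could be decoded"
            # error is swallowed and the raw result is returned unparsed; any
            # other error still propagates.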
            result = json.loads(result)
        except Exception, e :
            if "No JSON object could be decoded" not in str( e ) :
                raise e

        return result

def test() :
    from config import test

    url = 'https://user.web.cern.ch/user/Welcome.asp'

    c = Curler(test())
    print " url = ", url, ' returned ', len( c.get(url) ), ' bytes.'

    url = 'https://cms-conddb-dev.cern.ch/getLumi/'

    print " url = ", url, ' returned ', len( c.get( url ) ), ' items in the JSON list.'

if __name__ == '__main__' :
    test( )
Example #13
    def __init__(self, sys_argv):
        super(App, self).__init__(sys_argv)
        self.model = Model()
        self.config = config.test()
        self.main_controller = Controller(self.model, self.config)
        self.main_controller.show_login()
Example #14
    new_network.id = next_network_id
    next_innov_num = new_network.mutate(next_innov_num)
    return new_network


running = True
while running:
    # start new round
    cfg.start_round()
    num_test = 0
    total_score = 0
    results = {}
    print("TRAINING...")
    for network_id in list(neural_networks.keys()):
        num_test += 1
        results[network_id] = cfg.test(neural_networks[network_id])
        total_score += results[network_id]
    mean_score = round(total_score / num_test, 2)

    # Species
    community.clear()
    new_species_id = 0
    for network_id, network in neural_networks.items():
        find_species(network_id)

    # rank each blocky by results
    ranking = []
    for network_id in results:
        ranking.append(network_id)
    ranking.sort(key=rank)
    ranking.reverse()
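
The find_species() helper and the rank key function are defined elsewhere in that project; a minimal stand-in for rank, assuming the intent is simply to order networks by the score recorded in results, might be:

def rank(network_id):
    # Hypothetical key function: a network's score for this round.
    return results[network_id]

With such a key, ranking.sort(key=rank) followed by ranking.reverse() is equivalent to ranking.sort(key=rank, reverse=True).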
Example #15
#!/usr/bin/env python

import os
import sys
import glob

from pprint import pprint

import config

cfg = config.test()


def checkDirs():
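    # Print, for every subdirectory under the dropBox main directory, how many
    # items it holds and their names.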

    mainDir = cfg.getDropBoxMainDir()

    # get list of subdirs:
    subDirsToCheck = glob.glob(mainDir)

    for entry in subDirsToCheck:
        subDirs = os.listdir(entry)
        for subDir in subDirs:
            items = os.listdir(os.path.join(entry, subDir))
            print 'checking %20s found %3i items: %s' % (subDir, len(items),
                                                         ','.join(items))


def checkLogs():

    mainDir = cfg.getDropBoxMainDir()
Example #16
File: test.py Project: lchaparr/ThesisCode
    def testRun(self):
        tstConfig = config.test()

        # override baseUrl to use private VM
        tstConfig.baseUrl = doUpload.frontendUrlTemplate % doUpload.frontendHost

        (username, account, password) = netrc.netrc().authenticators('newOffDb')
        frontendHttp = http.HTTP()
        frontendHttp.setBaseUrl(tstConfig.baseUrl)

        folders = os.listdir( 'testFiles' )

        logging.info('Testing %s bunches...', len(folders))

        i = 0
        for folder in folders:
            i += 1

            loggingPrefix = '  [%s/%s] %s:' % (i, len(folders), folder)
            logging.info('%s Testing bunch...', loggingPrefix)

            logging.info( '%s Signing in the frontend...', loggingPrefix)
            frontendHttp.query('signIn', {
                'username': username,
                'password': password,
            })

            # First ask also to hold the files until we have uploaded all
            # the folder to prevent the backend from taking them in-between.
            logging.info( '%s Asking the frontend to hold files...', loggingPrefix)
            frontendHttp.query('holdFiles')

            # Wait until the dropBox has nothing to do
            logging.info( '%s Waiting for backend to be idle...', loggingPrefix)
            while dataAccess.getLatestRunLogStatusCode() != Constants.NOTHING_TO_DO:
                time.sleep(2)

            # When we reach this point, the server will always report an empty
            # list of files, so even if it starts a new run right now, we can
            # safely manipulate the list of files. Therefore, ask the frontend
            # to do a clean up to delete previous files and database entries
            logging.info( '%s Asking the frontend to clean up files and database...', loggingPrefix)
            frontendHttp.query('cleanUp')

            # Upload all the test files in the folder
            logging.info('%s Uploading files...', loggingPrefix)
            self.upload(folder, loggingPrefix = loggingPrefix)

            # And finally release the files so that the backend can take them
            logging.info( '%s Asking the frontend to release files...', loggingPrefix)
            frontendHttp.query('releaseFiles')

            logging.info( '%s Signing out the frontend...', loggingPrefix)
            frontendHttp.query('signOut')

            # The backend will process the files eventually, so wait for
            # a finished status code
            logging.info('%s Waiting for backend to process files...', loggingPrefix)
            while True:
                statusCode = dataAccess.getLatestRunLogStatusCode()

                if statusCode in frozenset([Constants.DONE_WITH_ERRORS, Constants.DONE_ALL_OK]):
                    break

                time.sleep(2)

            # First compare the runLog's statusCode
            logging.info('%s Comparing runLog results...', loggingPrefix)
            with open(os.path.join('testFiles', folder, 'statusCode'), 'rb') as f:
                self.assertEqual(statusCode, getattr(Constants, f.read().strip()))

            # Then compare the runLog's logs
            (creationTimestamp, downloadLog, globalLog) = dataAccess.getLatestRunLogInfo()

            downloadLog = logPack.unpack(downloadLog)
            globalLog = logPack.unpack(globalLog)

            logging.debug('downloadLog = %s', downloadLog)
            logging.debug('globalLog = %s', globalLog)

            with open(os.path.join('testFiles', folder, 'downloadLog'), 'rb') as f:
                templateMatch.match(f.read(), downloadLog)
            
            with open(os.path.join('testFiles', folder, 'globalLog'), 'rb') as f:
                templateMatch.match(f.read(), globalLog)

            tests = [x.partition('.txt')[0] for x in glob.glob(os.path.join('testFiles', folder, '*.txt'))]

            logging.info('%s Comparing %s fileLogs results...', loggingPrefix, len(tests))

            # Then for each file in the test, compare the fileLog's foreign key, statusCode and log
            j = 0
            for test in tests:
                j += 1

                logging.info('%s   [%s/%s] %s: Comparing file...', loggingPrefix, j, len(tests), os.path.basename(test))

                # Get the expected file hash
                with open('%s.fileHash' % test, 'rb') as f:
                    fileHash = f.read().strip()

                (fileStatusCode, fileLog, runLogCreationTimestamp) = dataAccess.getFileLogInfo(fileHash)

                # Compare the foreign key
                self.assertEqual(creationTimestamp, runLogCreationTimestamp)

                # Compare the statusCode
                with open('%s.statusCode' % test, 'rb') as f:
                    self.assertEqual(fileStatusCode, getattr(Constants, f.read().strip()))

                fileLog = logPack.unpack(fileLog)

                # Compare the fileLog
                with open('%s.fileLog' % test, 'rb') as f:
                    templateMatch.match(f.read(), fileLog)
Example #17
#!/usr/bin/env python

import os
import sys
import glob

from pprint import pprint

import config

cfg = config.test()

def checkDirs():

    mainDir = cfg.getDropBoxMainDir()

    # get list of subdirs:
    subDirsToCheck = glob.glob(mainDir)

    for entry in subDirsToCheck:
        subDirs = os.listdir( entry )
        for subDir in subDirs:
            items = os.listdir( os.path.join(entry, subDir) )
            print 'checking %20s found %3i items: %s' % (subDir, len(items), ','.join(items))

def checkLogs():

    mainDir = cfg.getDropBoxMainDir( )
    logDir = os.path.join(mainDir, 'logs', 'backup')

    logFile = logDir + '/Downloader.log'
Example #18
def test():
    config.test()
    raise NotImplementedError()
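
None of the snippets above includes the config module itself, so the shape of the object returned by config.test() can only be inferred from usage (and it clearly differs between projects: example #1 passes it a file and a remote target, while example #14 calls it as a scoring method). A purely hypothetical stand-in covering only the attributes the dropBox-style snippets touch might look like:

class _TestConfig(object):
    # Attribute names are inferred from the examples above; this is an
    # assumption for experimentation, not the real dropBox configuration.
    delay = 10  # seconds between runs (example #8)
    baseUrl = 'https://example.invalid/'  # reassigned anyway in example #16

    def getDropBoxMainDir(self):  # used by the checkDirs() snippets
        return '/tmp/dropBox'


def test():
    return _TestConfig()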