Exemplo n.º 1
0
# Python 2 maintenance snippet (truncated in this source): emulates DDM
# "dataset completed" callbacks for the dispatch datasets of jobs sitting
# in jobsDefined4, so stuck jobs can proceed.
from taskbuffer.DBProxy import DBProxy
import userinterface.Client as Client
import urllib2, urllib, datetime, time

# password
from config import panda_config
passwd = panda_config.dbpasswd

# time limit
# only datasets created more than two hours ago are acted on below
timeLimit = datetime.datetime.utcnow() - datetime.timedelta(hours=2)

# instantiate DB proxies
# NOTE(review): host/user/DB name are hard-coded to a development database
# here, unlike sibling snippets that read them from panda_config — confirm
# this is intentional before reuse.
proxyS = DBProxy()
proxyS.connect('adbpro.usatlas.bnl.gov', passwd, 'panda-developer',
               'PandaDevDB')

# get PandaIDs from jobsDefined
# distinct dispatch blocks of all currently-defined jobs
res = proxyS.querySQL(
    "SELECT dispatchDBlock from jobsDefined4 GROUP BY dispatchDBlock")

# emulate DDM callbacks
jobs = []
for dispatchDBlock, in res:
    # get VUID and creationdate
    # NOTE(review): dataset name is interpolated directly into SQL; names
    # come from the jobs DB, but parameterized queries would be safer.
    resvuid = proxyS.querySQL(
        "SELECT vuid,creationdate from Datasets WHERE name='%s'" %
        dispatchDBlock)
    if len(resvuid) == 1:
        vuid, creationdate = resvuid[0]
        # convert creationdate (DB string) to a datetime object
        # NOTE: the snippet is cut off mid-statement here in the source
        creation_datetime = datetime.datetime(
Exemplo n.º 2
0
# Python 2 maintenance snippet (truncated in this source): emulates DDM
# callbacks for dispatch blocks of 'assigned' managed jobs at the two BNL
# sites that have not been modified for over an hour.
import time
from dataservice.DDM import ddm
from taskbuffer.DBProxy import DBProxy
import userinterface.Client as Client
import urllib2,urllib,datetime,time
import jobscheduler.siteinfo
import jobscheduler.Site
import brokerage.broker_util

# password
# A very minor edit.
from config import panda_config
passwd = panda_config.dbpasswd

# instantiate DB proxies
proxyS = DBProxy()
proxyS.connect(panda_config.dbhost,panda_config.dbpasswd,panda_config.dbuser,panda_config.dbname)

# get PandaIDs from jobsDefined
# stale = not modified within the last hour
timeLimit = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
sql  = "SELECT dispatchDBlock from jobsDefined4 WHERE jobStatus='assigned' AND prodSourceLabel='managed' "
sql += "AND (computingSite='BNL_ATLAS_1' OR computingSite='BNL_ATLAS_2') AND modificationTime<'%s' "
sql += "GROUP BY dispatchDBlock"

res = proxyS.querySQL(sql % timeLimit.strftime('%Y-%m-%d %H:%M:%S'))

# emulate DDM callbacks
for dispatchDBlock, in res:
    print dispatchDBlock
    # throttle between successive callbacks
    time.sleep(5)
    # get file list
Exemplo n.º 3
0
# Python 2 maintenance snippet (truncated in this source): finds '_sub'
# output datasets that stayed in running/created/defined for over a week
# and (in the missing remainder of the loop) closes them.
import userinterface.Client as Client
from dataservice.DDM import ddm
from taskbuffer.DBProxy import DBProxy
from taskbuffer.TaskBuffer import taskBuffer
from pandalogger.PandaLogger import PandaLogger
from jobdispatcher.Watcher import Watcher

# logger
_logger = PandaLogger().getLogger('closeDS')

# password
from config import panda_config
passwd = panda_config.dbpasswd

# instantiate DB proxies
proxyS = DBProxy()
proxyS.connect(panda_config.dbhost, panda_config.dbpasswd, panda_config.dbuser,
               panda_config.dbname)

# time limit for dataset closing
# NOTE(review): `datetime` is used but not imported in this snippet —
# confirm the full script imports it.
timeLimit = datetime.datetime.utcnow() - datetime.timedelta(days=7)

# close datasets
# loop until no stale '_sub' datasets remain (REGEXP matches names ending
# in '_sub<digits>')
while True:
    sql = "SELECT vuid,name,modificationdate FROM Datasets " + \
          "WHERE type='output' AND (status='running' OR status='created' OR status='defined') " + \
          "AND modificationdate<'%s' AND name REGEXP '_sub[[:digit:]]+$'"
    ret, res = proxyS.querySQLS(sql % timeLimit.strftime('%Y-%m-%d %H:%M:%S'))
    _logger.debug("# of dataset : %s" % len(res))
    if len(res) == 0:
        break
Exemplo n.º 4
0
# Python 2 snippet (truncated in this source): prints a "PandaSiteIDs"
# mapping by matching siteinfo site names against schedconfig nicknames.
import re
from jobscheduler import siteinfo

from taskbuffer.DBProxy import DBProxy

# password
from config import panda_config
passwd = panda_config.dbpasswd

# proxy to the logging/meta DB where schedconfig lives
proxyN = DBProxy()
proxyN.connect(panda_config.logdbhost,panda_config.logdbpasswd,panda_config.logdbuser,'PandaMetaDB')

status,res = proxyN.querySQLS("SELECT nickname from schedconfig")

# flatten the 1-column result set into a plain list of nicknames
nicknames = []
for (nickname,) in res:
    nicknames.append(nickname)


print "PandaSiteIDs = {"
sites = siteinfo.sites.keys()
sites.sort()
for site in sites:
    vals = siteinfo.sites[site]
    # NOTE(review): index 10 presumably holds the site's OK/enabled flag —
    # verify against the siteinfo.sites schema.
    okFlag = vals[10]
    fName = ''
    # strip decorations from the site name before pattern matching
    sitePat = site
    sitePat = re.sub('_PAUL','',sitePat)
    sitePat = re.sub('_TEST$','',sitePat)
    sitePat = re.sub('_test$','',sitePat)    
    sitePat = re.sub('^ANALY_LONG_','',sitePat)        
# Python 2 maintenance snippet (truncated in this source): emulates DDM
# "dataset completed" HTTP callbacks for dispatch datasets older than two
# hours, against a hard-coded development database.
from taskbuffer.DBProxy import DBProxy
import userinterface.Client as Client
import urllib2,urllib,datetime,time

# password
from config import panda_config
passwd = panda_config.dbpasswd

# time limit
# only datasets created more than two hours ago are called back
timeLimit = datetime.datetime.utcnow() - datetime.timedelta(hours=2)

# instantiate DB proxies
# NOTE(review): development host/user/DB are hard-coded here
proxyS = DBProxy()
proxyS.connect('adbpro.usatlas.bnl.gov',passwd,'panda-developer','PandaDevDB')

# get PandaIDs from jobsDefined
res = proxyS.querySQL("SELECT dispatchDBlock from jobsDefined4 GROUP BY dispatchDBlock")

# emulate DDM callbacks
jobs=[]
for dispatchDBlock, in res:
    # get VUID and creationdate
    resvuid = proxyS.querySQL("SELECT vuid,creationdate from Datasets WHERE name='%s'" % dispatchDBlock)
    if len(resvuid) == 1:
        vuid,creationdate = resvuid[0]
        # convert creationdate (DB string) to a datetime via time.strptime
        creation_datetime = datetime.datetime(*time.strptime(creationdate,'%Y-%m-%d %H:%M:%S')[:6])
        if creation_datetime < timeLimit:
            # make HTTP request
            # payload mimics the DDM callback; snippet is cut off after
            # building the URL
            node={'vuid':vuid}
            url=Client.baseURLSSL+'/datasetCompleted'
Exemplo n.º 6
0
# Python 2 maintenance snippet (truncated in this source): repeatedly
# fetches all waiting jobs for the cloud given on the command line and
# kills them in chunks of 300 until none remain.
import sys
import time
import datetime
from taskbuffer.DBProxy import DBProxy
import userinterface.Client as Client

# password
from config import panda_config
passwd = panda_config.dbpasswd

# target cloud name, e.g. passed as the first CLI argument
cloud = sys.argv[1]

# instantiate DB proxies
proxyS = DBProxy()
proxyS.connect(panda_config.dbhost,panda_config.dbpasswd,panda_config.dbuser,panda_config.dbname)

while True:
    # get PandaIDs
    res = proxyS.querySQL("SELECT PandaID FROM jobsWaiting4 WHERE cloud='%s' ORDER BY PandaID" % cloud)
    # escape
    if len(res) == 0:
        break
    # convert to list
    jobs = []
    for id, in res:
        jobs.append(id)
    # reassign
    # process in batches of nJob IDs; snippet is cut off inside the loop
    nJob = 300
    iJob = 0
    while iJob < len(jobs):
        print 'killJobs(%s)' % jobs[iJob:iJob+nJob]
# Python 2 maintenance snippet (truncated in this source): loads defined
# jobs untouched for over an hour and builds the list of usable sites
# (presumably for reassignment — the acting code is cut off).
import datetime
from taskbuffer.DBProxy import DBProxy
import userinterface.Client as Client
import jobscheduler.Site
import random
import time

# password
from config import panda_config
passwd = panda_config.dbpasswd

# time limit
# stale = not modified within the last hour
timeLimit = datetime.datetime.utcnow() - datetime.timedelta(hours=1)

# instantiate DB proxies
proxyS = DBProxy()
proxyS.connect(panda_config.dbhost,panda_config.dbpasswd,panda_config.dbuser,panda_config.dbname)

# get PandaIDs from jobsDefined
res = proxyS.querySQL("SELECT PandaID,modificationTime from jobsDefined4 ORDER BY modificationTime")

# list of known sites
tmpSites = jobscheduler.Site.KnownSite.getAllSitesID()
allSites = []
for site in tmpSites:
    # _allSites may contain NULL after sort()
    if site == 'NULL':
        continue
    # ignore test sites
    if site.endswith('test') or site.endswith('Test'):
        continue
Exemplo n.º 8
0
# Python 2 maintenance snippet (truncated in this source): variant of the
# per-cloud waiting-job killer — fetches waiting jobs for the cloud named
# on the command line in a loop; the batch-processing tail is cut off.
import sys
import time
import datetime
from taskbuffer.DBProxy import DBProxy
import userinterface.Client as Client

# password
from config import panda_config
passwd = panda_config.dbpasswd

# target cloud name from the first CLI argument
cloud = sys.argv[1]

# instantiate DB proxies
proxyS = DBProxy()
proxyS.connect(panda_config.dbhost, panda_config.dbpasswd, panda_config.dbuser,
               panda_config.dbname)

while True:
    # get PandaIDs
    res = proxyS.querySQL(
        "SELECT PandaID FROM jobsWaiting4 WHERE cloud='%s' ORDER BY PandaID" %
        cloud)
    # escape
    if len(res) == 0:
        break
    # convert to list
    jobs = []
    for id, in res:
        jobs.append(id)
    # reassign
    # batch size; snippet is cut off before the batching loop
    nJob = 300
Exemplo n.º 9
0
# Python 2 maintenance snippet (truncated in this source): variant of the
# BNL DDM-callback emulator — selects dispatch blocks of 'assigned'
# managed jobs at the BNL sites older than one hour; the per-block
# callback code is cut off after the throttle sleep.
import time
from dataservice.DDM import ddm
from taskbuffer.DBProxy import DBProxy
import userinterface.Client as Client
import urllib2, urllib, datetime, time
import jobscheduler.siteinfo
import jobscheduler.Site
import brokerage.broker_util

# password
# A very minor edit.
from config import panda_config
passwd = panda_config.dbpasswd

# instantiate DB proxies
proxyS = DBProxy()
proxyS.connect(panda_config.dbhost, panda_config.dbpasswd, panda_config.dbuser,
               panda_config.dbname)

# get PandaIDs from jobsDefined
# stale = not modified within the last hour
timeLimit = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
sql = "SELECT dispatchDBlock from jobsDefined4 WHERE jobStatus='assigned' AND prodSourceLabel='managed' "
sql += "AND (computingSite='BNL_ATLAS_1' OR computingSite='BNL_ATLAS_2') AND modificationTime<'%s' "
sql += "GROUP BY dispatchDBlock"

res = proxyS.querySQL(sql % timeLimit.strftime('%Y-%m-%d %H:%M:%S'))

# emulate DDM callbacks
for dispatchDBlock, in res:
    print dispatchDBlock
    # throttle between successive callbacks
    time.sleep(5)
Exemplo n.º 10
0
# Python 2 maintenance snippet (truncated in this source): variant of the
# closeDS script — selects week-old '_sub' output datasets still in
# running/created/defined; the loop body that processes each dataset is
# cut off right after the `for` header.
import userinterface.Client as Client
from dataservice.DDM import ddm
from taskbuffer.DBProxy import DBProxy
from taskbuffer.TaskBuffer import taskBuffer
from pandalogger.PandaLogger import PandaLogger
from jobdispatcher.Watcher import Watcher

# logger
_logger = PandaLogger().getLogger('closeDS')

# password
from config import panda_config
passwd = panda_config.dbpasswd

# instantiate DB proxies
proxyS = DBProxy()
proxyS.connect(panda_config.dbhost,panda_config.dbpasswd,panda_config.dbuser,panda_config.dbname)

# time limit for dataset closing
# NOTE(review): `datetime` is used but not imported in this snippet —
# confirm the full script imports it.
timeLimit = datetime.datetime.utcnow() - datetime.timedelta(days=7)

# close datasets
# loop until no stale '_sub' datasets remain (REGEXP matches names ending
# in '_sub<digits>')
while True:
    sql = "SELECT vuid,name,modificationdate FROM Datasets " + \
          "WHERE type='output' AND (status='running' OR status='created' OR status='defined') " + \
          "AND modificationdate<'%s' AND name REGEXP '_sub[[:digit:]]+$'"
    ret,res = proxyS.querySQLS(sql % timeLimit.strftime('%Y-%m-%d %H:%M:%S'))
    _logger.debug("# of dataset : %s" % len(res))
    if len(res) == 0:
        break
    for (vuid,name,modDate) in res:
Exemplo n.º 11
0
# Python 2 maintenance snippet (truncated in this source): computes the
# month-based suffixes of the current and previous archive tables, then
# scans the archive DB for filesTable*/jobsArchived* tables.
# NOTE(review): `datetime`, `panda_config` and `DBProxy` are used but not
# imported in this snippet — they must come from the missing top of the
# full script.
# table names
# snap the date to the start of the current two-month period; January
# rolls back into December of the previous year
cdate = datetime.datetime.utcnow()
if cdate.month==1:
    cdate = cdate.replace(year = (cdate.year-1))
    cdate = cdate.replace(month = 12, day = 1)
else:
    # Python 2 integer division: month is rounded down to an even month
    cdate = cdate.replace(month = (cdate.month/2)*2, day = 1)
currentSuffix = "_%s%s" % (cdate.strftime('%b'),cdate.year)
# previous period is two months back, wrapping across the year boundary
if cdate.month > 2:
    odate = cdate.replace(month = (cdate.month-2))
else:
    odate = cdate.replace(year = (cdate.year-1), month = 12)
previousSuffix = "_%s%s" % (odate.strftime('%b'),odate.year)

# instantiate DB proxies
# proxyS: live jobs DB; proxyN: archive DB
proxyS = DBProxy()
proxyN = DBProxy()
proxyS.connect(panda_config.dbhost,panda_config.dbpasswd,panda_config.dbuser,panda_config.dbname)
proxyN.connect(panda_config.logdbhost,panda_config.logdbpasswd,panda_config.logdbuser,'PandaArchiveDB')

# get tables
fileTables = []
jobsTables = {}
status,res = proxyN.querySQLS("show tables")
if res != None:
    for table, in res:
        if table.startswith('filesTable'):
            fileTables.append(table)
        if table.startswith('jobsArchived'):
            # get MAX PandaID
            # snippet is cut off after this query
            statusJ,resJ = proxyN.querySQLS("SELECT MAX(PandaID) FROM %s" % table)
Exemplo n.º 12
0
# Kill jobs that have been sitting in jobsDefined4 for more than one day.
import datetime
from taskbuffer.DBProxy import DBProxy
import userinterface.Client as Client

# DB password from the PanDA configuration
from config import panda_config
passwd = panda_config.dbpasswd

# anything not modified within the last 24 hours counts as stale
timeLimit = datetime.datetime.utcnow() - datetime.timedelta(days=1)

# open a proxy to the development database
proxyS = DBProxy()
proxyS.connect('adbpro.usatlas.bnl.gov',passwd,'panda-developer','PandaDevDB')

# fetch (PandaID, modificationTime) pairs, oldest first
res = proxyS.querySQL("SELECT PandaID,modificationTime from jobsDefined4 ORDER BY modificationTime")

# collect the IDs of stale jobs and kill them all in a single call
jobs = [pandaID for (pandaID, modTime) in res if modTime < timeLimit]

Client.killJobs(jobs)

Exemplo n.º 13
0
# Python 2 maintenance snippet (truncated in this source): emulates DDM
# callbacks for the dispatch datasets of jobs in a PandaID range given on
# the command line (one ID, or an inclusive start/end pair).
# NOTE(review): `sys` and `DBProxy` are used but not imported in this
# snippet — they must come from the missing top of the full script.
# password
from config import panda_config
passwd = panda_config.dbpasswd

# a single argument selects exactly one PandaID; two arguments select an
# inclusive range and must be ordered
if len(sys.argv) == 2:
    startID = int(sys.argv[1])
    endID = startID
else:
    startID = int(sys.argv[1])
    endID = int(sys.argv[2])
    if startID > endID:
        print '%d is less than %d' % (endID, startID)
        sys.exit(1)

# instantiate DB proxies
proxyS = DBProxy()
proxyS.connect(panda_config.dbhost, panda_config.dbpasswd, panda_config.dbuser,
               panda_config.dbname)

# get PandaIDs from jobsDefined
# distinct dispatch blocks of defined jobs inside the requested ID range
res = proxyS.querySQL(
    "SELECT dispatchDBlock from jobsDefined4 WHERE PandaID>=%s AND PandaID<=%s GROUP BY dispatchDBlock"
    % (startID, endID))

# emulate DDM callbacks
for dispatchDBlock, in res:
    # get VUID and creationdate
    resvuid = proxyS.querySQL("SELECT vuid from Datasets WHERE name='%s'" %
                              dispatchDBlock)
    if len(resvuid) == 1:
        # snippet is cut off after unpacking the VUID
        vuid, = resvuid[0]