# Preamble of the "send castor jobs" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  Names such as sys, os, logging, datetime, settings, env
# and get_wsgi_application are imported earlier in the full file -- confirm.

# Cap on how many jobs a single run of this script may send.
max_send_amount = 1

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.test_send_castor_jobs.log')

# Snapshot of "now" reused for DB timestamp updates later in the script.
today = datetime.datetime.today()

logger.info('Starting %s' % __file__)

# Single-instance guard: exit if another copy of this script is running.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
if check_process(__file__, pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)

# Fabric connection parameters for the remote COMPASS host.
env.hosts = []
env.hosts.append(settings.COMPASS_HOST)
env.user = settings.COMPASS_USER
env.password = settings.COMPASS_PASS
# "get proxy" periodic-task script: renews the grid proxy via a SAGA
# UserPass session.  NOTE(review): this chunk was collapsed onto a single
# line; newlines are restored here and the chunk appears truncated inside
# main() -- confirm against the full file.

import sys
import saga
import os
import logging
import time

from utils import check_process, getRotatingFileHandler
from django.conf import settings
from django.core.wsgi import get_wsgi_application

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.get_proxy.log')
logger.info('Starting %s' % __file__)


def main():
    """Open a SAGA session to the proxy host using credentials from settings.

    Logs the mtime of the current local proxy file, if any, before
    connecting.  (Chunk truncated: the remainder of the renewal logic is
    outside this view.)
    """
    # Conventional location of the local X.509 proxy for the current user.
    proxy_local = '/tmp/x509up_u%s' % os.geteuid()
    try:
        ctx = saga.Context("UserPass")
        ctx.user_id = settings.PROXY_USER_ID  # remote login name
        ctx.user_pass = settings.PROXY_PASSWORD  # password
        if os.path.isfile(proxy_local):
            old_proxy = os.stat(proxy_local).st_mtime
            logger.info("Current proxy: %s" % time.ctime(old_proxy))
        logger.info('Connect to %s' % settings.PROXY_HOST)
        session = saga.Session()
        session.add_context(ctx)
        # NOTE(review): original chunk ends here, mid-`try`.
    except Exception:
        # NOTE(review): reconstructed handler -- the original `try` block
        # was truncated with no except/finally in this chunk; confirm the
        # real handler against the full file.
        logger.exception('Failed to establish SAGA session')
# Preamble of the "send jobs" (PanDA submission) periodic-task script
# (chunk).  NOTE(review): this chunk was collapsed onto a single line;
# newlines are restored here.  sys/os/logging/settings are imported
# earlier in the full file -- confirm.

from taskbuffer.FileSpec import FileSpec

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from schedconfig.models import Jobsactive4
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.send_jobs.log')
logger.info('Starting %s' % __file__)

# PanDA client reads its endpoints and proxy from the environment.
logger.info('Setting environment for PanDA client')
aSrvID = None
os.environ["PANDA_URL_SSL"] = settings.PANDA_URL_SSL
os.environ["PANDA_URL"] = settings.PANDA_URL
os.environ["X509_USER_PROXY"] = settings.X509_USER_PROXY
logger.info('PanDA URL SSL: %s' % os.environ["PANDA_URL_SSL"])

# Single-instance guard: exit if another copy of this script is running.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
if check_process(__file__, pid):
    logger.info('Another %s process is running, exiting' % __file__)
    # NOTE(review): chunk truncated here; sibling scripts call sys.exit(0)
    # at this point -- confirm against the full file.
# Preamble of the "check castor mdst status" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/datetime/settings/env are imported
# earlier in the full file -- confirm.

from fabric.context_managers import shell_env, cd
import csv

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.check_castor_mdst_status.log')

# Snapshot of "now" reused for DB timestamp updates later in the script.
today = datetime.datetime.today()

logger.info('Starting %s' % __file__)

# Single-instance guard keyed on the script file name (note the literal
# name differs from the log-file stem -- preserved as in the original).
pid = str(os.getpid())
logger.info('pid: %s' % pid)
if check_process('check_castor_status_mdst.py', pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)

# Fabric connection parameters for the remote COMPASS host.
env.hosts = []
env.hosts.append(settings.COMPASS_HOST)
env.user = settings.COMPASS_USER
env.password = settings.COMPASS_PASS
def check_files_on_castor():
    """Poll CASTOR for merged mDST files whose transfer was marked 'sent'.

    For every production that has jobs with status_castor_mdst='sent',
    list the mDST directory on CASTOR via a remote ``nsls -l`` and, per
    merged chunk: mark the jobs 'finished' once the file is migrated to
    tape (leading ``m`` permission flag), or restart the transfer when
    the file is zero-sized for >= 12 h or missing for >= 1 h.

    Relies on module-level names defined elsewhere in the file:
    ``logger``, ``today``, ``exec_remote_cmd``, ``restart_transfer`` and
    the ``Job`` model.
    """
    logger.info('Getting productions with castor mdst status sent')
    tasks_list = Job.objects.filter(status_castor_mdst='sent').values_list(
        'task_id', 'task__path', 'task__soft', 'task__prodslt',
        'task__phastver', 'task__type').distinct()
    logger.info('Got list of %s prods: %s' % (len(tasks_list), tasks_list))
    logger.info(
        'Check details in the corresponding periodic_tasks.check_castor_mdst_status_taskid.log'
    )
    for t in tasks_list:
        # Dedicated per-task log file.
        logger_task = logging.getLogger('periodic_tasks_logger')
        getRotatingFileHandler(
            logger_task,
            'periodic_tasks.check_castor_mdst_status_%s.log' % t[0])
        logger_task.info('Starting')
        logger_task.info('Getting mdst chunks with castor mdst status sent')
        chunks_list = Job.objects.filter(task__id=t[0]).filter(
            status_castor_mdst='sent').values_list(
                'task_id', 'run_number', 'chunk_number_merging_mdst',
                'date_updated').distinct()
        logger_task.info('Got list of %s chunks' % len(chunks_list))
        logger_task.info(
            'Going to request list of files on castor for task %s' % t[0])
        # Mass-production output lives under an extra /oracle_dst/ level.
        oracle_dst = ''
        if t[5] == 'mass production':
            oracle_dst = '/oracle_dst/'
        cmd = 'nsls -l /castor/cern.ch/compass/%(prodPath)s%(oracleDst)s%(prodSoft)s/mDST/' % {
            'prodPath': t[1],
            'prodSoft': t[2],
            'oracleDst': oracle_dst
        }
        logger_task.info(cmd)
        result = exec_remote_cmd(cmd)
        if result.succeeded:
            logger_task.info(
                'Successfully read files on castor for task %s' % t[0])
            for c in chunks_list:
                found = False
                # Re-parse the nsls listing for every chunk: a DictReader
                # is a one-shot iterator.
                reader = csv.DictReader(result.splitlines(),
                                        delimiter=' ',
                                        skipinitialspace=True,
                                        fieldnames=[
                                            'permissions', 'links', 'owner',
                                            'group', 'size', 'date1',
                                            'date2', 'time', 'name'
                                        ])
                # Expected merged-mDST file name; non-zero chunk numbers
                # get a zero-padded ".NNN" suffix.
                test = 'mDST-%(runNumber)s-%(prodSlt)s-%(phastVer)s.root' % {
                    'runNumber': c[1],
                    'prodSlt': t[3],
                    'phastVer': t[4]
                }
                if format(int(c[2]), '03d') != '000':
                    test = test + '.' + str(format(c[2], '03d'))
                for r in reader:
                    # logger_task.info('name - test: %s - %s' % (r['name'], test))
                    if r['name'] == test:
                        found = True
                        logger_task.info(r)
                        logger_task.info(
                            'Found "%s" for task id %s run number %s chunk number %s, %s'
                            % (r['permissions'][0], t[0], c[1], c[2], test))
                        if r['permissions'][0] == 'm':
                            # Leading 'm' flag in the nsls permissions
                            # column marks the file as migrated to tape.
                            logger_task.info(
                                'Going to update jobs of the chunk as migrated'
                            )
                            try:
                                j_update = Job.objects.filter(
                                    task=t[0],
                                    run_number=c[1],
                                    chunk_number_merging_mdst=c[2]).update(
                                        status_castor_mdst='finished',
                                        date_updated=today)
                                logger_task.info(
                                    'Job status_castor_mdst changed to finished for task %s run number %s chunk number %s'
                                    % (t[0], c[1], c[2]))
                            except Exception:
                                # Narrowed from a bare `except:` so that
                                # SystemExit/KeyboardInterrupt propagate.
                                logger_task.error(
                                    'Failed to update jobs for task %s run number %s chunk number %s'
                                    % (t[0], c[1], c[2]))
                        else:
                            logger_task.info('Chunk not yet migrated')
                            if r['size'] == '0':
                                diff = datetime.datetime.now().replace(
                                    tzinfo=None) - c[3].replace(tzinfo=None)
                                # BUG FIX: timedelta.seconds ignores whole
                                # days, so multi-day-old transfers looked
                                # recent; use total_seconds() instead.
                                hours = int(diff.total_seconds() // 3600)
                                logger_task.info(
                                    'File %s was not delivered, transfer was submitted at %s which is %s hours from now'
                                    % (test, c[3], hours))
                                if hours >= 12:
                                    logger_task.info(
                                        'Problematic chunk found, status will be changed to ready for rewriting'
                                    )
                                    restart_transfer(logger_task, t[0], c[1],
                                                     c[2])
                        break
                if found is False:
                    # File absent from the listing: restart the transfer
                    # once it is more than an hour old.
                    diff = datetime.datetime.now().replace(
                        tzinfo=None) - c[3].replace(tzinfo=None)
                    hours = int(diff.total_seconds() // 3600)
                    logger_task.info(
                        'File %s was not delivered, transfer was submitted at %s which is %s hours from now'
                        % (test, c[3], hours))
                    if hours >= 1:
                        # Consistency fix: was logging via the module-level
                        # `logger` instead of the per-task logger.
                        logger_task.info(
                            'Transfer request was performed in more than 1 hours ago, going to restart it'
                        )
                        restart_transfer(logger_task, t[0], c[1], c[2])
        else:
            logger_task.info('Error reading files on castor for task %s' % t)
            logger_task.error(result)
        logger_task.info('done')
        logger_task.handlers[0].close()
# Preamble of the "get number of events" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/datetime/settings/env are imported
# earlier in the full file -- confirm.

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from utils import check_process, getRotatingFileHandler

# Upper bound on how many jobs a single run of this script may check.
max_check_amount = 3000

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.get_number_of_events.log')

# Snapshot of "now" reused for DB timestamp updates later in the script.
today = datetime.datetime.today()

logger.info('Starting %s' % __file__)

# Single-instance guard keyed on the script file name.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
logger.info('__file__: %s' % __file__)
if check_process("get_number_of_events.py", pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)

# Fabric connection parameters for the remote COMPASS host.
env.hosts = []
env.hosts.append(settings.COMPASS_HOST)
env.user = settings.COMPASS_USER
# Preamble of the "delete panda log files" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/settings/env are imported earlier in the
# full file -- confirm.

import csv

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from schedconfig.models import Jobsactive4
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.delete_panda_log_files.log')
logger.info('Starting %s' % __file__)

# Single-instance guard keyed on the script file name.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
logger.info('__file__: %s' % __file__)
if check_process("delete_panda_log_files.py", pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)

# Fabric connection parameters for the remote COMPASS host.
env.hosts = []
env.hosts.append(settings.COMPASS_HOST)
env.user = settings.COMPASS_USER
env.password = settings.COMPASS_PASS
# Preamble of the "send castor jobs dump" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/datetime/settings are imported earlier
# in the full file -- confirm.

from fabric.api import env, run, execute, settings as sett, hide
from fabric.context_managers import shell_env, cd

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.send_castor_jobs_dump.log')

# Snapshot of "now" reused for DB timestamp updates later in the script.
today = datetime.datetime.today()

logger.info('Starting %s' % __file__)

# Single-instance guard keyed on the script file name.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
logger.info('__file__: %s' % __file__)
if check_process("send_castor_jobs_dump.py", pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)

# Fabric connection parameters for the remote COMPASS host.
env.hosts = []
env.hosts.append(settings.COMPASS_HOST)
env.user = settings.COMPASS_USER
def check_files_on_castor():
    """Poll CASTOR for merged histogram files whose transfer was marked 'sent'.

    For every production that has jobs with status_castor_histos='sent',
    list the histos directory on CASTOR via a remote ``nsls -l`` and, per
    merged chunk: mark the jobs 'finished' once the file is migrated to
    tape (leading ``m`` permission flag), or reset the jobs to 'ready'
    for rewriting when the file exists but is zero-sized.

    Relies on module-level names defined elsewhere in the file:
    ``logger``, ``today``, ``exec_remote_cmd`` and the ``Job`` model.
    """
    logger.info('Getting productions with castor histos status sent')
    tasks_list = Job.objects.filter(status_castor_histos='sent').values_list(
        'task_id', 'task__path', 'task__soft', 'task__prodslt',
        'task__phastver', 'task__type').distinct()
    logger.info('Got list of %s prods: %s' % (len(tasks_list), tasks_list))
    logger.info(
        'Check details in the corresponding periodic_tasks.check_castor_hist_status_taskid.log'
    )
    for t in tasks_list:
        # Dedicated per-task log file.
        logger_task = logging.getLogger('periodic_tasks_logger')
        getRotatingFileHandler(
            logger_task,
            'periodic_tasks.check_castor_hist_status_%s.log' % t[0])
        logger_task.info('Starting')
        logger_task.info(
            'Getting histos with castor histos status sent for task id %s' %
            t[0])
        chunks_list = Job.objects.filter(task__id=t[0]).filter(
            status_castor_histos='sent').values_list(
                'task_id', 'run_number',
                'chunk_number_merging_histos').distinct()
        logger_task.info('Got list of %s chunks' % len(chunks_list))
        logger_task.info(
            'Going to request list of files on castor for task %s' % t[0])
        # Mass-production output lives under an extra /oracle_dst/ level.
        oracle_dst = ''
        if t[5] == 'mass production':
            oracle_dst = '/oracle_dst/'
        cmd = 'nsls -l /castor/cern.ch/compass/%(prodPath)s%(oracleDst)s%(prodSoft)s/histos/' % {
            'prodPath': t[1],
            'prodSoft': t[2],
            'oracleDst': oracle_dst
        }
        logger_task.info(cmd)
        result = exec_remote_cmd(cmd)
        if result.succeeded:
            logger_task.info(
                'Successfully read files on castor for task %s' % t[0])
            for c in chunks_list:
                # BUG FIX: the DictReader used to be built once, outside
                # this loop; being a one-shot iterator it was exhausted
                # after the first chunk, so later chunks never matched.
                reader = csv.DictReader(result.splitlines(),
                                        delimiter=' ',
                                        skipinitialspace=True,
                                        fieldnames=[
                                            'permissions', 'links', 'owner',
                                            'group', 'size', 'date1',
                                            'date2', 'time', 'name'
                                        ])
                # Expected merged-histogram file name; non-zero chunk
                # numbers get a zero-padded ".NNN" suffix.
                test = 'histsum-%(runNumber)s-%(prodSlt)s-%(phastVer)s.root' % {
                    'runNumber': c[1],
                    'prodSlt': t[3],
                    'phastVer': t[4]
                }
                if format(int(c[2]), '03d') != '000':
                    test = test + '.' + str(format(c[2], '03d'))
                for r in reader:
                    if r['name'].find(test) != -1:
                        logger_task.info(r)
                        logger_task.info(
                            'Found "%s" for task id %s run number %s chunk number %s, %s'
                            % (r['permissions'][0], t[0], c[1], c[2], test))
                        if r['permissions'][0] == 'm':
                            # Leading 'm' flag in the nsls permissions
                            # column marks the file as migrated to tape.
                            logger_task.info(
                                'Going to update jobs of the chunk as migrated'
                            )
                            try:
                                j_update = Job.objects.filter(
                                    task=t[0],
                                    run_number=c[1],
                                    chunk_number_merging_histos=c[2]).update(
                                        status_castor_histos='finished',
                                        date_updated=today)
                                logger_task.info(
                                    'Job status_castor_histos changed to finished for task %s run number %s chunk number %s'
                                    % (t[0], c[1], c[2]))
                            except Exception:
                                # Narrowed from a bare `except:` so that
                                # SystemExit/KeyboardInterrupt propagate.
                                logger_task.error(
                                    'Failed to update jobs for task %s run number %s chunk number %s'
                                    % (t[0], c[1], c[2]))
                        else:
                            logger_task.info('Chunk not yet migrated')
                            if r['size'] == '0':
                                # Zero-sized file: the transfer failed,
                                # reset the chunk for rewriting.
                                logger_task.info(
                                    'Problematic chunk found, status will be changed to ready for rewriting'
                                )
                                try:
                                    j_update = Job.objects.filter(
                                        task=t[0],
                                        run_number=c[1],
                                        chunk_number_merging_histos=c[2]
                                    ).update(status_castor_histos='ready',
                                             date_updated=today)
                                    logger_task.info(
                                        'Job status_castor_histos changed to ready for task %s run number %s chunk number %s'
                                        % (t[0], c[1], c[2]))
                                except Exception:
                                    logger_task.error(
                                        'Failed to update jobs for task %s run number %s chunk number %s'
                                        % (t[0], c[1], c[2]))
                        break
        else:
            logger_task.info('Error reading files on castor for task %s' % t)
            logger_task.error(result)
        logger_task.info('done')
        logger_task.handlers[0].close()
# Preamble of the "archive logs" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/settings/env are imported earlier in the
# full file -- confirm.

import csv

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from schedconfig.models import Jobsactive4
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.archive_logs.log')
logger.info('Starting %s' % __file__)

# Single-instance guard keyed on the script file name.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
logger.info('__file__: %s' % __file__)
if check_process("archive_logs.py", pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)

# Fabric connection parameters for the remote COMPASS host.
env.hosts = []
env.hosts.append(settings.COMPASS_HOST)
env.user = settings.COMPASS_USER
env.password = settings.COMPASS_PASS
# "define jobs for task" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/datetime are imported earlier in the full file,
# and main() is truncated at the end of this chunk -- confirm.

import logging

from django.core.wsgi import get_wsgi_application
from django.db import DatabaseError, IntegrityError

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from prodsys.models import Task, Job
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.define_jobs_for_task.log')

# Snapshot of "now" reused for DB timestamp updates later in the script.
today = datetime.datetime.today()

logger.info('Starting %s' % __file__)

# Single-instance guard: exit if another copy of this script is running.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
if check_process(__file__, pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)


def main():
    """Select 'ready' tasks fed from a files list.

    (Chunk truncated: the job-definition logic that consumes ``td`` is
    outside this view.)
    """
    logger.info('Getting tasks with status ready')
    td = Task.objects.all().filter(status='ready').filter(
        files_source='files list')
# "x-check dump" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/datetime are imported earlier in the
# full file, and main() is truncated at the end of this chunk -- confirm.

from django.core.wsgi import get_wsgi_application
from django.db import DatabaseError, IntegrityError
from _mysql import NULL  # NOTE(review): looks unused/accidental -- confirm before removing

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from schedconfig.models import Filestable4
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.x_check_dump.log')

# Snapshot of "now" reused for DB timestamp updates later in the script.
today = datetime.datetime.today()

logger.info('Starting %s' % __file__)

# Single-instance guard: exit if another copy of this script is running.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
if check_process(__file__, pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)


def main():
    """Cross-check dump files for tasks in active states.

    (Chunk truncated: the per-task logic that consumes ``tasks_list`` is
    outside this view.)
    """
    logger.info('Getting tasks with status send, running and paused')
    tasks_list = Task.objects.all().filter(
        Q(status='send') | Q(status='running') | Q(status='paused'))
    # tasks_list = Task.objects.all().filter(name='dvcs2016P01-DDD')
    logger.info('Got list of %s tasks' % len(tasks_list))
# Preamble of the "prepare files on castor" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/settings/env are imported earlier in the
# full file -- confirm.

import csv

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from schedconfig.models import Jobsactive4
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.prepare_files_on_castor.log')
logger.info('Starting %s' % __file__)

# Single-instance guard keyed on the script file name.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
logger.info('__file__: %s' % __file__)
if check_process("prepare_files_on_castor.py", pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)

# Fabric connection parameters for the remote COMPASS host.
env.hosts = []
env.hosts.append(settings.COMPASS_HOST)
env.user = settings.COMPASS_USER
env.password = settings.COMPASS_PASS
# Preamble of the "send merging jobs mdst" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/datetime/settings are imported earlier
# in the full file -- confirm.

from taskbuffer.FileSpec import FileSpec

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from schedconfig.models import Filestable4
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.send_merging_jobs_mdst.log')

# Snapshot of "now" reused for DB timestamp updates later in the script.
today = datetime.datetime.today()

logger.info('Starting %s' % __file__)

# PanDA client reads its endpoints and proxy from the environment.
logger.info('Setting environment for PanDA client')
aSrvID = None
os.environ["PANDA_URL_SSL"] = settings.PANDA_URL_SSL
os.environ["PANDA_URL"] = settings.PANDA_URL
os.environ["X509_USER_PROXY"] = settings.X509_USER_PROXY
logger.info('PanDA URL SSL: %s' % os.environ["PANDA_URL_SSL"])

# Single-instance guard: exit if another copy of this script is running.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
if check_process(__file__, pid):
    # NOTE(review): the original chunk is truncated at the `if` header;
    # this body is reconstructed from the identical guard in the sibling
    # scripts of this file -- confirm against the full source.
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)
# Preamble of the "define jobs from runs" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/datetime/settings/env are imported
# earlier in the full file -- confirm.

from fabric.context_managers import shell_env, cd
import csv

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.define_jobs_from_runs.log')

# Snapshot of "now" reused for DB timestamp updates later in the script.
today = datetime.datetime.today()

logger.info('Starting %s' % __file__)

# Single-instance guard keyed on the script file name.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
logger.info('__file__: %s' % __file__)
if check_process("define_jobs_from_runs.py", pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)

# Fabric connection parameters for the remote COMPASS host.
env.hosts = []
env.hosts.append(settings.COMPASS_HOST)
env.user = settings.COMPASS_USER
# Per-run "check job panda status" script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/argparse/timezone and the earlier
# sys.path / DJANGO_SETTINGS_MODULE setup are outside this view, and
# main() is truncated at the end of this chunk -- confirm.

application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from schedconfig.models import Jobsactive4, Jobsarchived4
from utils import check_process, getRotatingFileHandler

# This script is invoked per task/run pair.
parser = argparse.ArgumentParser()
parser.add_argument('-t', '--task', type=int, required=True)
parser.add_argument('-r', '--run-number', type=int, required=True)
args = parser.parse_args()

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(
    logger, 'periodic_tasks.check_job_panda_status_%s_%s.log' %
    (args.task, args.run_number))

today = timezone.now()

logger.info('Starting %s %s %s' % (__file__, args.task, args.run_number))


def main():
    """Check PanDA status of jobs for one task/run pair.

    (Chunk truncated: the logic that consumes ``jobs_list`` is outside
    this view.)
    """
    logger.info('Getting task object with id %s' % args.task)
    t = Task.objects.get(id=args.task)
    logger.info(
        'Getting jobs with status send and running for task number %s and run number %s'
        % (args.task, args.run_number))
    jobs_list = Job.objects.filter(task=t).filter(
        run_number=args.run_number).filter(
            # NOTE(review): the original chunk is truncated mid-call; this
            # filter argument is reconstructed from the log message above
            # -- confirm the exact status values against the full source.
            Q(status='sent') | Q(status='running'))
# "check merging dump job status" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/datetime are imported earlier in the
# full file, and main() is truncated at the end of this chunk -- confirm.

from django.core.wsgi import get_wsgi_application
from django.db import DatabaseError, IntegrityError
from _mysql import NULL  # NOTE(review): looks unused/accidental -- confirm before removing

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from schedconfig.models import Jobsactive4, Jobsarchived4
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger,
                       'periodic_tasks.check_merging_dump_job_status.log')

# Snapshot of "now" reused for DB timestamp updates later in the script.
today = datetime.datetime.today()

logger.info('Starting %s' % __file__)

# Single-instance guard: exit if another copy of this script is running.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
if check_process(__file__, pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)


def main():
    """Check merging-dump job status for tasks in active states.

    (Chunk truncated: the per-task logic that consumes ``tasks_list`` is
    outside this view.)
    """
    logger.info('Getting tasks with status send, running and paused')
    tasks_list = Task.objects.all().filter(
        Q(status='send') | Q(status='running') | Q(status='paused'))
    # tasks_list = Task.objects.all().filter(name='dvcs2017P01t1_mu-_part2')
    logger.info('Got list of %s tasks' % len(tasks_list))
# "check job panda status" periodic-task script (chunk).
# NOTE(review): this chunk was collapsed onto a single line; newlines are
# restored here.  sys/os/logging/timezone are imported earlier in the
# full file, and get_running_processes() is truncated at the end of this
# chunk -- confirm.

import subprocess
import time

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))  # fix me in case of using outside the project
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compass.settings")
application = get_wsgi_application()

# Django must be configured (above) before these project imports.
from django.db.models import Q
from prodsys.models import Task, Job
from utils import check_process, getRotatingFileHandler

logger = logging.getLogger('periodic_tasks_logger')
getRotatingFileHandler(logger, 'periodic_tasks.check_job_panda_status.log')

today = timezone.now()

logger.info('Starting %s' % __file__)

# Single-instance guard: exit if another copy of this script is running.
pid = str(os.getpid())
logger.info('pid: %s' % pid)
if check_process(__file__, pid):
    logger.info('Another %s process is running, exiting' % __file__)
    sys.exit(0)


def get_running_processes():
    """Collect currently running checker processes.

    (Chunk truncated: the process-enumeration logic that fills and
    returns ``running_processes`` is outside this view.)
    """
    running_processes = []
    logger.info('Going to check running processes')