Example #1
    def __init__(self):

        # Init the runDB
        self.db = ConnectMongoDB()

        # Take all data type categories
        self.RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['raw_records_tpc_types']
        self.RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['raw_records_mv_types']
        self.RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['raw_records_nv_types']
        self.LIGHT_RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['light_raw_records_tpc_types']
        self.LIGHT_RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['light_raw_records_mv_types']
        self.LIGHT_RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['light_raw_records_nv_types']
        self.HIGH_LEVEL_TYPES = helper.get_hostconfig()['high_level_types']
        self.RECORDS_TYPES = helper.get_hostconfig()['records_types']

        self.n_upload_threads_low = helper.get_hostconfig()['n_upload_threads_low']
        self.n_upload_threads_high = helper.get_hostconfig()['n_upload_threads_high']

        #Choose which data type you want to treat
        self.DTYPES = self.RAW_RECORDS_TPC_TYPES + self.RAW_RECORDS_MV_TYPES + self.RAW_RECORDS_NV_TYPES + self.LIGHT_RAW_RECORDS_TPC_TYPES + self.LIGHT_RAW_RECORDS_MV_TYPES + self.LIGHT_RAW_RECORDS_NV_TYPES + self.HIGH_LEVEL_TYPES + self.RECORDS_TYPES

        self.HIGH_DTYPES = self.LIGHT_RAW_RECORDS_TPC_TYPES + self.LIGHT_RAW_RECORDS_MV_TYPES + self.LIGHT_RAW_RECORDS_NV_TYPES + self.HIGH_LEVEL_TYPES

        self.LOW_DTYPES = self.RAW_RECORDS_TPC_TYPES + self.RAW_RECORDS_MV_TYPES + self.RAW_RECORDS_NV_TYPES + self.RECORDS_TYPES

        self.threads = []
Example #2
def list_file_replicas(run_number, dtype, hash, rse='UC_DALI_USERDISK'):

#    db = ConnectMongoDB()
    rc = RucioSummoner(helper.get_hostconfig("rucio_backend"))

#    print("Looking for run "+str(run_number)+", data type "+dtype+", hash "+hash+", in rse="+rse)

    # checks if run is present in run database
    # this will improve the reaction speed in case the run does not exist
    # since we do not call Rucio commands
#    cursor = db.GetRunByNumber(run_number)
#    if len(cursor)==0:
#        print("Error. Run not existing in database")
#        return list()

    # build did
    did = make_did(run_number, dtype, hash)

    file_replicas = {}

    # check if the DID exists in the given RSE
    if rc.CheckRule(did, rse) != 'OK':
#        print("Error. Not found in this rse")
        return list()

    file_replicas = rc.ListFileReplicas(did,rse,localpath=True)

    return list(file_replicas.values())
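
A minimal usage sketch for the function above; the run number, data type, and hash here are purely illustrative and assume aDMIX has already loaded its host configuration:

# Hypothetical values: replace with a real run number, data type and hash
replicas = list_file_replicas(7000, 'raw_records', 'rfzvpzj4mf', rse='UC_DALI_USERDISK')
if not replicas:
    print("No replica found for this DID in the given RSE")
for local_path in replicas:
    print(local_path)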
Example #3
def remove_datatype_from_db(did,rse):

    config = Config()
    helper.make_global("admix_config", os.path.abspath(config.get('Admix','config_file')))
    RSES = helper.get_hostconfig()['rses']


    DB = ConnectMongoDB()
    rc = RucioSummoner()

    hash = did.split('-')[-1]
    dtype = did.split('-')[0].split(':')[-1]
    number = int(did.split(':')[0].split('_')[-1])

    print("Removing",number,dtype,hash)


    rundoc = DB.db.find_one({'number' : number})

    print("status = ",rundoc['status'])

    run_id = "%06d" % number

    # set the run status to transferring
    DB.db.find_one_and_update({'number':number},{'$set':{"status": "transferring"}})

    # Remove DB entries for all RSEs
    for d in rundoc['data']:
        if d['type'] == dtype and d['host'] == 'rucio-catalogue' and d['did'] == did and d['location'] == rse:

            # Remove the data entry in DB
            print("Deleting data = ",d)
            DB.db.update_one({"_id" : rundoc['_id']},
                              {"$pull" : {"data" : d} })
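
The function above recovers the run number, data type, and hash purely by splitting the DID string. A standalone sketch of that parsing, using a made-up DID of the same shape:

# Hypothetical DID of the form scope:type-hash, as used throughout these examples
did = 'xnt_007000:raw_records-rfzvpzj4mf'
hash = did.split('-')[-1]                       # 'rfzvpzj4mf'
dtype = did.split('-')[0].split(':')[-1]        # 'raw_records'
number = int(did.split(':')[0].split('_')[-1])  # 7000
print(number, dtype, hash)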
Example #4
def main():
    print("")
    print("--------------------------")
    print("-- aDMIX Upload Manager --")
    print("--------------------------")
    print("")

    parser = argparse.ArgumentParser(description="aDMIX Upload Manager")

    config = Config()

    # From here the input depends on the usage of the command
    # Add arguments for the process manager:
    parser.add_argument('--admix-config', dest="admix_config", type=str, default=config.get('Admix','config_file'),
                        help="Load your host configuration")
    parser.add_argument('--once', dest='once', action='store_true',
                        help="Run aDMIX Upload Manager only once")
    parser.add_argument('--high', dest='high', action='store_true',
                        help="Treat only high level data types")
    parser.add_argument('--low', dest='low', action='store_true',
                        help="Treat only low level data types")
    args = parser.parse_args()

    # We make the individual arguments globally available right after aDMIX starts:
    helper.make_global("admix_config", os.path.abspath(args.admix_config))
    helper.make_global("high", args.high)
    helper.make_global("low", args.low)
    helper.make_global("once", args.once)

    #Pre tests:
    # admix host configuration must match the hostname:
    if helper.get_hostconfig()['hostname'] != helper.get_hostname():
        print("The admix configuration file is for host %s" % helper.get_hostconfig()['hostname'])
        print("but you are running on {0}".format(helper.get_hostname()))
        exit()

    # Set up the logger in a very basic mode
#    lg = Logger(logpath=helper.get_hostconfig()['log_path'],
#                loglevel=logging.DEBUG)
#    lg.Info("-----------------------------------------")
#    lg.Info("aDMIX - advanced Data Management in XENON")
#    helper.make_global("logger", lg)

    upload_manager = UploadManager()
    
    upload_manager.loop()
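
A hedged way to exercise main() from Python by simulating the command line; the program name in sys.argv[0] is just a placeholder, while the flags are the ones defined by the parser above:

import sys
# Run the upload manager once, treating only high-level data types
sys.argv = ['admix-upload-manager', '--once', '--high']
main()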
Example #5
def showcontexts():

    config = Config()
    helper.make_global("admix_config", os.path.abspath(config.get('Admix','config_file')))

    #Define data types
    NORECORDS_DTYPES = helper.get_hostconfig()['norecords_types']
    RAW_RECORDS_DTYPES = helper.get_hostconfig()['raw_records_types']
    RECORDS_DTYPES = helper.get_hostconfig()['records_types']

    #Get other parameters
    DATADIR = helper.get_hostconfig()['path_data_to_upload']
    periodic_check = helper.get_hostconfig()['upload_periodic_check']
    RSES = helper.get_hostconfig()['rses']

    #Init the runDB
    db = ConnectMongoDB()

    data_types = RAW_RECORDS_DTYPES + RECORDS_DTYPES + NORECORDS_DTYPES


    context = 'xenonnt_online'

    for dtype in data_types:
        hash = utilix.db.get_hash(context, dtype)
        print('Data type {0}, hash {1}'.format(dtype,hash))
Example #6
def remove_datatype_from_db_and_datamanager(did):

    config = Config()
    helper.make_global("admix_config", os.path.abspath(config.get('Admix','config_file')))
    RSES = helper.get_hostconfig()['rses']


    DB = ConnectMongoDB()
    rc = RucioSummoner()

    hash = did.split('-')[-1]
    dtype = did.split('-')[0].split(':')[-1]
    number = int(did.split(':')[0].split('_')[-1])

    print("Removing",number,dtype,hash)


    rundoc = DB.db.find_one({'number' : number})

    print("status = ",rundoc['status'])

    run_id = "%06d" % number

    # make it uploadable
    DB.db.find_one_and_update({'number':number},{'$set':{"status": "eb_ready_to_upload"}})

    # Remove DB entries for all RSEs
    for d in rundoc['data']:
        if d['type'] == dtype and d['host'] == 'rucio-catalogue':

            # Remove the data entry in DB
            print("Deleting data = ",d)
            DB.db.update_one({"_id" : rundoc['_id']},
                              {"$pull" : {"data" : d} })

    # Remove Rucio rules for all RSEs
    for rse in RSES:

        rucio_rule = rc.GetRule(upload_structure=did, rse=rse)
        if rucio_rule['exists']:
            print("Deleting rucio rule = ", rucio_rule['id'])
            rc.DeleteRule(rucio_rule['id'])


    files = list_file_replicas(number, dtype, hash, "LNGS_USERDISK")
    print("Deleting rucio data in datamanager disk. Deleting",len(files),"files")
    for file in files:
        os.remove(file)
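
A hedged call example with a purely illustrative DID; note that this function deletes runDB entries, Rucio rules on all configured RSEs, and the local files on the datamanager disk, so it should only be run deliberately:

# Illustrative only: irreversibly removes DB entries, Rucio rules and on-disk files
remove_datatype_from_db_and_datamanager('xnt_007000:raw_records-rfzvpzj4mf')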
Example #7
    def loop(self):

        while True:
            
            self.run()

            if helper.global_dictionary.get('once'):
                break

            # Wait
            wait_time = helper.get_hostconfig()['sleep_time']
            print('Waiting for {0} seconds'.format(wait_time))
            print("You can safely CTRL-C now if you need to stop me")
            try:
                time.sleep(wait_time)
            except KeyboardInterrupt:
                break
Example #8
    def postpone(self):

        # Get the current screen session
        process = psutil.Process()
        screen = process.parent().parent().parent().parent().cmdline()[-1]

        # Take the tmp file of this session containing the dataset information
        filename = "/tmp/admix-" + screen

        # Destination name
        suffix = time.strftime("-%Y-%m-%d-%H-%M-%S",
                               time.localtime(time.time()))
        destination_path = helper.get_hostconfig()['path_datasets_to_fix'] + "/"
        new_filename = destination_path + filename.split('/')[-1] + suffix

        if os.path.isfile(filename) and os.path.isdir(destination_path):
            shutil.move(filename, new_filename)
            print("Dataset postponed by moving file {0} to {1}".format(
                filename, new_filename))
Example #9
def remove_datatype_from_db_and_rule(number,dtype,rse):

    config = Config()
    helper.make_global("admix_config", os.path.abspath(config.get('Admix','config_file')))
    RSES = helper.get_hostconfig()['rses']


    DB = ConnectMongoDB()
    rc = RucioSummoner()

    print("Removing",number,dtype)


    rundoc = DB.db.find_one({'number' : number})

    print("status = ",rundoc['status'])

    run_id = "%06d" % number

    # make it uploadable
#    DB.db.find_one_and_update({'number':number},{'$set':{"status": "transferring"}})


    for d in rundoc['data']:
        if d['type'] == dtype and d['host'] == 'rucio-catalogue' and d['location'] == rse:

            # Remove the data entry in DB
            print("Deleting datum from DB = ",d)
            DB.db.update_one({"_id" : rundoc['_id']},
                              {"$pull" : {"data" : d} })

            # Remove Rucio rule
            rucio_rule = rc.GetRule(upload_structure=d['did'], rse=rse)
            if rucio_rule['exists']:
                print("Deleting rucio rule = ", rucio_rule['id'])
                rc.DeleteRule(rucio_rule['id'])
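
A hedged call example; the run number, data type, and RSE are illustrative. The function removes the matching datum from the runDB and deletes the corresponding Rucio rule on that RSE only:

# Illustrative only: removes one datum from the DB and its rule on a single RSE
remove_datatype_from_db_and_rule(7000, 'records', 'UC_DALI_USERDISK')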
Example #10
def your_admix():
    print("advanced Data Management in XENON")

    parser = argparse.ArgumentParser(description="Run your favourite aDMIX")

    config = Config()

    # From here the input depends on the usage of the command
    parser.add_argument('task',
                        nargs="?",
                        default="default",
                        help="Select an aDMIX task")
    parser.add_argument('--admix-config',
                        dest="admix_config",
                        type=str,
                        default=config.get('Admix', 'config_file'),
                        help="Load your host configuration")
    parser.add_argument('--once',
                        dest='once',
                        action='store_true',
                        help="Run aDMIX only once")
    parser.add_argument('--sleep-time',
                        dest='sleep_time',
                        type=int,
                        help="Time to wait before running the task again")
    args = parser.parse_args()

    helper.make_global("admix_config", os.path.abspath(args.admix_config))

    if args.sleep_time is not None:
        helper.make_global("sleep_time", args.sleep_time)
    else:
        helper.make_global("sleep_time", helper.get_hostconfig()['sleep_time'])

    #Pre tests:
    # admix host configuration must match the hostname:
    if helper.get_hostconfig()['hostname'] != helper.get_hostname():
        print("The admix configuration file is for host %s" %
              helper.get_hostconfig()['hostname'])
        print("but you are running on {0}".format(helper.get_hostname()))
        exit()

    # Set up the logger in a very basic mode
    lg = Logger(logpath=helper.get_hostconfig()['log_path'],
                loglevel=logging.DEBUG)
    lg.Info("-----------------------------------------")
    lg.Info("aDMIX - advanced Data Management in XENON")
    helper.make_global("logger", lg)

    #Determine which tasks are addressed:
    # - if it comes from args.task, use it, regardless of what is defined in hostconfig("task")
    # - if args.task == default use hostconfig("task") information
    task_list = []
    if args.task == "default":
        task_list.extend(helper.get_hostconfig("task"))
    else:
        task_list = [args.task]

    # Test whether each requested task is registered in NameCollector
    task_test = [i_task in NameCollector for i_task in task_list]
    task_list = np.array(task_list)[task_test]

    if len(task_list) == 0:
        print("Select a task from this list:")
        for i_task in NameCollector:
            print("  <> {0}".format(i_task))
        print("or adjust the 'task' field in your configuration")
        print("file: {0}".format(helper.global_dictionary["admix_config"]))
        exit()

    # Create a tmp file named after the screen session that contains this process
    process = psutil.Process()
    screen = process.parent().parent().parent().parent().cmdline()[-1]
    open("/tmp/admix-" + screen, 'a').close()

    # Loop over the initialization of all classes
    for i_task in task_list:
        ClassCollector[i_task].init()

    #Go for the loop
    while True:

        for i_task in task_list:
            ClassCollector[i_task].run()

        if args.once:
            end_admix()
            break

        if os.path.exists("/tmp/admix-stop"):
            print("Exiting because of the presence of /tmp/admix-stop file")
            end_admix()
            break

        wait_time = helper.global_dictionary['sleep_time']
        if "CheckTransfers" in task_list or "CleanEB" in task_list:
            wait_time = 600

        print('Waiting for {0} seconds'.format(wait_time))
        print("You can safely CTRL-C now if you need to stop me")
        try:
            time.sleep(wait_time)

        except KeyboardInterrupt:
            end_admix()
            break
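
A hedged way to drive your_admix() from Python by simulating a command line; the program name is a placeholder and CheckTransfers is only an example task name (it must be registered in NameCollector to be accepted):

import sys
# Run the CheckTransfers task once, then exit
sys.argv = ['admix', 'CheckTransfers', '--once', '--sleep-time', '300']
your_admix()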
Example #11
    def init(self):
        helper.global_dictionary['logger'].Info(f'Init task {self.__class__.__name__}')

#        open("/tmp/admix-upload_from_lngs", 'a').close()

        # Take all data type categories
        self.RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['raw_records_tpc_types']
        self.RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['raw_records_mv_types']
        self.RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['raw_records_nv_types']
        self.LIGHT_RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['light_raw_records_tpc_types']
        self.LIGHT_RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['light_raw_records_mv_types']
        self.LIGHT_RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['light_raw_records_nv_types']
        self.HIGH_LEVEL_TYPES = helper.get_hostconfig()['high_level_types']
        self.RECORDS_TYPES = helper.get_hostconfig()['records_types']

        # Get the sequence of rules to be created according to the data type
        self.RAW_RECORDS_TPC_RSES = helper.get_hostconfig()["raw_records_tpc_rses"]
        self.RAW_RECORDS_MV_RSES = helper.get_hostconfig()["raw_records_mv_rses"]
        self.RAW_RECORDS_NV_RSES = helper.get_hostconfig()["raw_records_nv_rses"]
        self.LIGHT_RAW_RECORDS_TPC_RSES = helper.get_hostconfig()["light_raw_records_tpc_rses"]
        self.LIGHT_RAW_RECORDS_MV_RSES = helper.get_hostconfig()["light_raw_records_mv_rses"]
        self.LIGHT_RAW_RECORDS_NV_RSES = helper.get_hostconfig()["light_raw_records_nv_rses"]
        self.HIGH_LEVEL_RSES = helper.get_hostconfig()["high_level_rses"]
        self.RECORDS_RSES = helper.get_hostconfig()["records_rses"]

        # Choose which RSE you want to upload to
        self.UPLOAD_TO = helper.get_hostconfig()['upload_to']

        # Choose the main path of the data to be uploaded
        self.DATADIR = helper.get_hostconfig()['path_data_to_upload']

        #Init the runDB
        self.db = ConnectMongoDB()

        #Init Rucio for later uploads and handling:
        self.rc = RucioSummoner(helper.get_hostconfig("rucio_backend"))
        self.rc.SetRucioAccount(helper.get_hostconfig('rucio_account'))
        self.rc.SetConfigPath(helper.get_hostconfig("rucio_cli"))
        self.rc.SetProxyTicket(helper.get_hostconfig('rucio_x509'))
        self.rc.SetHost(helper.get_hostconfig('host'))
        self.rc.ConfigHost()
        self.rc.SetProxyTicket("rucio_x509")
Example #12
    def showrun(self, arg_number, arg_to, arg_dtypes, arg_compact,
                arg_dumpjson, arg_status, arg_latest, arg_pending):

        #Define data types
        RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['raw_records_tpc_types']
        RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['raw_records_mv_types']
        RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['raw_records_nv_types']
        LIGHT_RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['light_raw_records_tpc_types']
        LIGHT_RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['light_raw_records_mv_types']
        LIGHT_RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['light_raw_records_nv_types']
        HIGH_LEVEL_TYPES = helper.get_hostconfig()['high_level_types']
        RECORDS_TYPES = helper.get_hostconfig()['records_types']

        #Get other parameters
        DATADIR = helper.get_hostconfig()['path_data_to_upload']
        RSES = helper.get_hostconfig()['rses']

        minimum_number_acceptable_rses = 2
        minimum_deltadays_allowed = 3

        # Storing some backup hashes in case DID information is not available
        bkp_hashes = {
            'raw_records': 'rfzvpzj4mf',
            'raw_records_he': 'rfzvpzj4mf',
            'raw_records_mv': 'rfzvpzj4mf',
            'raw_records_aqmon': 'rfzvpzj4mf',
            'records': '56ausr64s7',
            'lone_hits': 'b7dgmtzaef'
        }

        context = 'xenonnt_online'

        #Init the runDB
        db = ConnectMongoDB()

        #Init Rucio for later uploads and handling:
        rc = RucioSummoner(helper.get_hostconfig("rucio_backend"))
        rc.SetRucioAccount(helper.get_hostconfig('rucio_account'))
        rc.SetConfigPath(helper.get_hostconfig("rucio_cli"))
        rc.SetProxyTicket(helper.get_hostconfig('rucio_x509'))
        rc.SetHost(helper.get_hostconfig('host'))
        rc.ConfigHost()
        rc.SetProxyTicket("rucio_x509")

        data_types = RAW_RECORDS_TPC_TYPES + RAW_RECORDS_MV_TYPES + RAW_RECORDS_NV_TYPES + LIGHT_RAW_RECORDS_TPC_TYPES + LIGHT_RAW_RECORDS_MV_TYPES + LIGHT_RAW_RECORDS_NV_TYPES + HIGH_LEVEL_TYPES + RECORDS_TYPES

        # if arg_number has been given
        if arg_number != "":

            # if the "number" argument is numeric, it is converted to an integer
            if arg_number.isdigit():
                arg_number = int(arg_number)
            # otherwise it is assumed that a DID has been given and run number and other parameters are extracted from the DID
            else:
                arg_number, dtype, hash = get_did(arg_number)
                arg_dtypes = [dtype]

        # if no arg_number has been given, then the "latest" option is activated (with 5 run numbers by default) in compact mode
        else:
            if arg_latest == 0:
                arg_latest = 5
                arg_compact = True

        if arg_latest > 0:
            cursor = db.db.find({}).sort('number', pymongo.DESCENDING).limit(1)
            cursor = list(cursor)
            arg_to = cursor[0]['number']
            arg_number = arg_to - arg_latest + 1
            print('Processing latest {0} runs'.format(arg_latest))

        if arg_to > arg_number:
            cursor = db.db.find({
                'number': {
                    '$gte': arg_number,
                    '$lte': arg_to
                }
            }).sort('number', pymongo.ASCENDING)
            print('Runs that will be processed are from {0} to {1}'.format(
                arg_number, arg_to))
        else:
            cursor = db.db.find({'number': arg_number})

        print('Run that will be processed is {0}'.format(arg_number))
        cursor = list(cursor)

        # Runs over all listed runs
        for run in cursor:

            print("")

            # Gets run number
            number = run['number']
            print('Run: {0}'.format(number))

            # Gets the status
            if 'status' in run:
                print('Status: {0}'.format(run['status']))
            else:
                print('Status: {0}'.format('Not available'))

            if arg_status:
                continue

            # Extracts the Event Builder machine that processed this run
            # Then also the bootstrax state and, in case it was abandoned, the reason
            if 'bootstrax' in run:
                bootstrax = run['bootstrax']
                eb = bootstrax['host'].split('.')[0]
                print('Processed by: {0}'.format(eb))
                if 'state' in bootstrax:
                    print('Bootstrax state: {0}'.format(bootstrax['state']))
                    if bootstrax['state'] == 'abandoned':
                        if 'reason' in bootstrax:
                            print('Reason: {0}'.format(bootstrax['reason']))
            else:
                print('Not processed')

            # Gets the date
            if 'start' in run:
                start_time = run['start'].replace(tzinfo=timezone.utc)
                print("Date: ", start_time.astimezone(tz=None))

                # Calculates the duration
                if 'end' in run:
                    if run['end'] is not None:
                        end_time = run['end'].replace(tzinfo=timezone.utc)
                        duration = end_time - start_time
                        print("Duration: ", duration)
                    else:
                        print("Duration: ", "unknown")

                # Prints whether the run is still recent enough (less than three days old)
                now_time = datetime.now().replace(tzinfo=timezone.utc)
                delta_time = now_time - start_time
                if delta_time < timedelta(days=minimum_deltadays_allowed):
                    print("Less than {0} days old".format(
                        minimum_deltadays_allowed))
            else:
                print("Warning : no time info available")

            # Gets the comments
            if 'comments' in run:
                if len(run['comments']) > 0:
                    last_comment = run['comments'][-1]
                    print("Latest comment ({0}): {1}".format(
                        last_comment['user'], last_comment['comment']))

            # Dumps the entire rundoc under json format
            if arg_dumpjson:
                print(dumps(run, indent=4))

            if arg_compact:
                continue

            # Merges data and deleted_data

    #        if 'deleted_data' in run:
    #            data = run['data'] + run['deleted_data']
    #        else:
            data = run['data']

            # Check if there are more instances in other EventBuilders
            extra_ebs = set()
            for d in data:
                if 'eb' in d['host'] and eb not in d['host']:
                    extra_ebs.add(d['host'].split('.')[0])
            if len(extra_ebs) > 0:
                print(
                    '\t\t Warning : The run has been processed by more than one EventBuilder: {0}'
                    .format(extra_ebs))

            # Runs over all data types to be monitored
            for dtype in data_types:

                if len(arg_dtypes) > 0:
                    if dtype not in arg_dtypes:
                        continue

                # Take the official number of files according to the run DB
                # and the eb status
                Nfiles = -1
                ebstatus = ""
                for d in data:
                    if d['type'] == dtype and eb in d['host']:
                        if 'file_count' in d:
                            Nfiles = d['file_count']
                        if 'status' in d:
                            ebstatus = d['status']

                if arg_pending:
                    if ebstatus in ["", "transferred"]:
                        continue

                # Data type name
                print('{0}'.format(dtype))

                if Nfiles == -1:
                    print('\t Number of files: missing in DB')
                else:
                    print('\t Number of files: {0}'.format(Nfiles))

                if ebstatus != "":
                    print('\t EB status: {0}'.format(ebstatus))
                else:
                    print('\t EB status: not available')

                # Check if data are still in the data list and not in deleted_data
                DB_InEB = False
                for d in run['data']:
                    if d['type'] == dtype and eb in d['host']:
                        DB_InEB = True
                DB_NotInEB = False
                if 'deleted_data' in run:
                    for d in run['deleted_data']:
                        if d['type'] == dtype and eb in d['host']:
                            DB_NotInEB = True
                if DB_InEB and not DB_NotInEB:
                    print('\t DB : still in EB')
                if not DB_InEB and DB_NotInEB:
                    print('\t DB : deleted from EB')
                if DB_InEB and DB_NotInEB:
                    print(
                        '\t\t Incoherency in DB: it is both in data list and in deleted_data list'
                    )
                #if (DB_InEB and DB_NotInEB) or (not DB_InEB and not DB_NotInEB):
                #  print('\t\t incoherency in DB: it is neither in data list nor in deleted_data list')

                # Check if data are still in the EB disks without using the DB
                upload_path = ""
                for d in run['data']:
                    if d['type'] == dtype and eb in d['host']:
                        file = d['location'].split('/')[-1]
                        upload_path = os.path.join(DATADIR, eb, file)
                path_exists = os.path.exists(upload_path)
                if upload_path != "" and path_exists:
                    path, dirs, files = next(os.walk(upload_path))
                    print('\t Disk: still in EB disk and with', len(files),
                          'files')
                else:
                    print('\t Disk: not in EB disk')
                if DB_InEB and not path_exists:
                    print(
                        '\t\t Incoherency in DB and disk: it is in DB data list but it is not in the disk'
                    )
                if DB_NotInEB and path_exists:
                    print(
                        '\t\t Incoherency in DB and disk: it is in DB deleted_data list but it is still in the disk'
                    )

                # The list of DIDs (usually just one)
                dids = set()
                for d in data:
                    if d['type'] == dtype and d['host'] == 'rucio-catalogue':
                        if 'did' in d:
                            dids.add(d['did'])
                print('\t DID:', dids)

                # Check the presence in each available RSE
                Nrses = 0
                for rse in RSES:
                    is_in_rse = False
                    for d in run['data']:
                        if d['type'] == dtype and rse in d['location']:
                            if 'status' in d:
                                status = d['status']
                            else:
                                status = 'Not available'
                            if 'did' in d:
                                hash = d['did'].split('-')[-1]
                                did = d['did']
                            else:
                                print(
                                    '\t\t Warning : DID information is absent in DB data list (old admix version). Using standard hashes for RSEs'
                                )
                                #hash = bkp_hashes.get(dtype)
                                #hash = utilix.db.get_hash(context, dtype)
                                hash = db.GetHashByContext(context, dtype)
                                did = make_did(number, dtype, hash)
                            rucio_rule = rc.GetRule(upload_structure=did,
                                                    rse=rse)
                            files = list_file_replicas(number, dtype, hash,
                                                       rse)
                            if rucio_rule['exists']:
                                print('\t', rse + ': DB Yes, Status', status,
                                      ', Rucio Yes, State',
                                      rucio_rule['state'], ",", len(files),
                                      'files')
                                if len(files) < Nfiles and rucio_rule[
                                        'state'] != "REPLICATING":
                                    print(
                                        '\t\t Warning : Wrong number of files in Rucio!!!'
                                    )
                            else:
                                print('\t', rse + ': DB Yes, Status', status,
                                      ', Rucio No')
                            # print(files)
                            is_in_rse = True
                            Nrses += 1
                    if not is_in_rse:
                        #                    print('\t\t Warning : data information is absent in DB data list. Trying using standard hashes to query Rucio')
                        #                    hash = bkp_hashes.get(dtype)
                        #hash = utilix.db.get_hash(context, dtype)
                        hash = db.GetHashByContext(context, dtype)
                        did = make_did(number, dtype, hash)
                        print('\t Guessed DID:', did)
                        rucio_rule = rc.GetRule(upload_structure=did, rse=rse)
                        files = list_file_replicas(number, dtype, hash, rse)
                        if rucio_rule['exists']:
                            print('\t', rse + ': DB No, Rucio Yes, State',
                                  rucio_rule['state'], ",", len(files),
                                  'files')
                            if len(files) < Nfiles and rucio_rule[
                                    'state'] != "REPLICATING":
                                print(
                                    '\t\t Warning : Wrong number of files in Rucio!!!'
                                )
                        else:
                            print('\t', rse + ': DB No, Rucio No')
                print('\t Number of sites: ', Nrses)
Example #13
    def __init__(self):

        # Take all data type categories
        self.RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['raw_records_tpc_types']
        self.RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['raw_records_mv_types']
        self.RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['raw_records_nv_types']
        self.LIGHT_RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['light_raw_records_tpc_types']
        self.LIGHT_RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['light_raw_records_mv_types']
        self.LIGHT_RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['light_raw_records_nv_types']
        self.HIGH_LEVEL_TYPES = helper.get_hostconfig()['high_level_types']
        self.RECORDS_TYPES = helper.get_hostconfig()['records_types']

        #Choose which data type you want to treat
        self.DTYPES = self.RAW_RECORDS_TPC_TYPES + self.RAW_RECORDS_MV_TYPES + self.RAW_RECORDS_NV_TYPES + self.LIGHT_RAW_RECORDS_TPC_TYPES + self.LIGHT_RAW_RECORDS_MV_TYPES + self.LIGHT_RAW_RECORDS_NV_TYPES + self.HIGH_LEVEL_TYPES + self.RECORDS_TYPES

        #Take the list of all XENON RSEs
        self.RSES = helper.get_hostconfig()['rses']

        #Take the RSE that is used to perform the upload
        self.UPLOAD_TO = helper.get_hostconfig()['upload_to']

        #Take the directory where datamanager has to upload data
        self.DATADIR = helper.get_hostconfig()['path_data_to_upload']

        # Get the sequence of rules to be created according to the data type
        self.RAW_RECORDS_TPC_RSES = helper.get_hostconfig()["raw_records_tpc_rses"]
        self.RAW_RECORDS_MV_RSES = helper.get_hostconfig()["raw_records_mv_rses"]
        self.RAW_RECORDS_NV_RSES = helper.get_hostconfig()["raw_records_nv_rses"]
        self.LIGHT_RAW_RECORDS_TPC_RSES = helper.get_hostconfig()["light_raw_records_tpc_rses"]
        self.LIGHT_RAW_RECORDS_MV_RSES = helper.get_hostconfig()["light_raw_records_mv_rses"]
        self.LIGHT_RAW_RECORDS_NV_RSES = helper.get_hostconfig()["light_raw_records_nv_rses"]
        self.HIGH_LEVEL_RSES = helper.get_hostconfig()["high_level_rses"]
        self.RECORDS_RSES = helper.get_hostconfig()["records_rses"]

        #Init the runDB
        self.db = ConnectMongoDB()

        #Init Rucio for later uploads and handling:
        self.rc = RucioSummoner()
        self.didclient = DIDClient()
        self.replicaclient = ReplicaClient()

        #Rucio Rule assignment priority
        self.priority = 3

        #Parameters to write warnings
        self.minimum_number_acceptable_rses = 2
        self.minimum_deltadays_allowed = 3
Example #14
    def init(self):
        helper.global_dictionary['logger'].Info(
            f'Init task {self.__class__.__name__}')

        #Define data types
        self.RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['raw_records_tpc_types']
        self.RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['raw_records_mv_types']
        self.RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['raw_records_nv_types']
        self.LIGHT_RAW_RECORDS_TPC_TYPES = helper.get_hostconfig()['light_raw_records_tpc_types']
        self.LIGHT_RAW_RECORDS_MV_TYPES = helper.get_hostconfig()['light_raw_records_mv_types']
        self.LIGHT_RAW_RECORDS_NV_TYPES = helper.get_hostconfig()['light_raw_records_nv_types']
        self.HIGH_LEVEL_TYPES = helper.get_hostconfig()['high_level_types']
        self.RECORDS_TYPES = helper.get_hostconfig()['records_types']

        #Get other parameters
        self.DATADIR = helper.get_hostconfig()['path_data_to_upload']
        self.RUCIODATADIR = helper.get_hostconfig()['path_rucio_data']
        self.RSES = helper.get_hostconfig()['rses']

        # Choose which RSE is used for the upload (usually it is LNGS_USERDISK)
        self.UPLOAD_TO = helper.get_hostconfig()['upload_to']

        self.minimum_number_acceptable_rses = 1
        self.minimum_deltadays_allowed = 0  #3
        self.minimum_deltadays_allowed_heavy = 0  #1
        self.dtype_delayed_delete = [
            'raw_records_aqmon', 'raw_records_aqmon_nv', 'raw_records_he',
            'raw_records_mv', 'raw_records_nv', 'pulse_counts',
            'pulse_counts_he', 'veto_regions', 'peaklets', 'peaklets_he',
            'records_he'
        ]
        self.dtype_delayed_delete_heavy = ['raw_records', 'records']
        self.dtype_never_delete = [
            'lone_hits', 'merged_s2s', 'peak_basics', 'peaklet_classification',
            'peak_positions_cnn', 'peak_positions_mlp', 'peak_positions_gcn',
            'peak_positions'
        ]

        #Init the runDB
        self.db = ConnectMongoDB()

        #We want the first and the last run:
        self.gboundary = self.db.GetBoundary()
        self.run_nb_min = self.gboundary['min_number']
        self.run_nb_max = self.gboundary['max_number']
        self.run_ts_min = self.gboundary['min_start_time']
        self.run_ts_max = self.gboundary['max_start_time']

        #Init the Rucio data format evaluator in three steps:
        self.rc_reader = ConfigRucioDataFormat()
        self.rc_reader.Config(helper.get_hostconfig('rucio_template'))

        #This class will evaluate your destinations:
        self.destination = Destination()

        # Since we deal with an experiment, everything is predefined:
        self.exp_temp = Templater()
        self.exp_temp.Config(helper.get_hostconfig()['template'])

        #Init a class to handle keyword strings:
        self.keyw = Keyword()

        #Init Rucio for later uploads and handling:
        self.rc = RucioSummoner(helper.get_hostconfig("rucio_backend"))
        self.rc.SetRucioAccount(helper.get_hostconfig('rucio_account'))
        self.rc.SetConfigPath(helper.get_hostconfig("rucio_cli"))
        self.rc.SetProxyTicket(helper.get_hostconfig('rucio_x509'))
        self.rc.SetHost(helper.get_hostconfig('host'))
        self.rc.ConfigHost()
        self.rc.SetProxyTicket("rucio_x509")
Example #15
def add_rule_run_dtype(number,dtype,from_rse,to_rse,priority=3):

    config = Config()
    helper.make_global("admix_config", os.path.abspath(config.get('Admix','config_file')))
    RSES = helper.get_hostconfig()['rses']


    DB = ConnectMongoDB()
    rc = RucioSummoner()

    print("Adding a new rule from {0} to {1}".format(from_rse,to_rse))
    print("Run number: {0}".format(number))
    print("Data type: {0}".format(dtype))

    run = DB.db.find_one({'number' : number})

    # Gets the status
    if 'status' in run:
        print('Run status: {0}'.format(run['status']))
    else:
        print('Run status: {0}'.format('Not available'))

    #Checks if the datum of the sender exists in the DB
    datum = None
    for d in run['data']:
        if d['type'] == dtype and d['host'] == 'rucio-catalogue' and d['location'] == from_rse:
            datum = d
            continue
    if datum is None:
        print('The datum concerning data type {0} and site {1} is missing in the DB. Forced to stop'.format(dtype,from_rse))
        return(0)

    print("DID: {0}".format(datum['did']))


    #Checks if the destination datum exists already in the DB
    for d in run['data']:
        if d['type'] == dtype and d['host'] == 'rucio-catalogue' and d['location'] == to_rse:
            print('Rule already exists')
            return(0)


    # Checks the rule status of the sender RSE
    rucio_rule = rc.GetRule(upload_structure=datum['did'], rse=from_rse)
    if rucio_rule['state'] != 'OK':
        print('The rule in {0} is not OK. Forced to stop'.format(from_rse))
        return(0)

    # set the new rule
    result = rc.AddConditionalRule(datum['did'], from_rse, to_rse, lifetime=None, priority=priority)
    rucio_rule = rc.GetRule(datum['did'], rse=to_rse)

    # Update run status
    DB.db.find_one_and_update({'number': number},{'$set': {'status': 'transferring'}})

    # Add a new datum in the run document
    updated_fields = {'host': "rucio-catalogue",
                      'type': dtype,
                      'location': to_rse,
                      'lifetime': rucio_rule['expires'],
                      'status': 'transferring',
                      'did': datum['did'],
                      'protocol': 'rucio'
            }
    data_dict = datum.copy()
    data_dict.update(updated_fields)
    DB.AddDatafield(run['_id'], data_dict)
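
A hedged call example; the run number, data type, and RSE names are illustrative (both RSEs appear elsewhere in these examples). The function requests a new Rucio rule to copy the datum and records a 'transferring' entry in the runDB:

# Illustrative only: ask Rucio to replicate one datum from LNGS to UC_DALI
add_rule_run_dtype(7000, 'raw_records', 'LNGS_USERDISK', 'UC_DALI_USERDISK', priority=3)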
Example #16
def reset_upload(did):

    config = Config()
    helper.make_global("admix_config", os.path.abspath(config.get('Admix','config_file')))
    RSES = helper.get_hostconfig()['rses']


    DB = ConnectMongoDB()
    rc = RucioSummoner()

    hash = did.split('-')[-1]
    dtype = did.split('-')[0].split(':')[-1]
    number = int(did.split(':')[0].split('_')[-1])

    print("Resetting the upload associated to the DID: {0}".format(did))
    print("Run number: {0}".format(number))
    print("Data type: {0}".format(dtype))
    print("Hash: {0}".format(hash))

    run = DB.db.find_one({'number' : number})

    # Gets the status
    if 'status' in run:
        print('Run status: {0}'.format(run['status']))
    else:
        print('Run status: {0}'.format('Not available'))

    # Extracts the Event Builder machine that processed this run
    # Then also the bootstrax state and, in case it was abandoned, the reason
    if 'bootstrax' in run:
        bootstrax = run['bootstrax']
        eb = bootstrax['host'].split('.')[0]
    else:
        print('Not processed')
        return(0)


    # Get the EB datum and its status
    ebstatus = ""
    datum = None
    for d in run['data']:
        if d['type'] == dtype and eb in d['host']:
            datum = d
            if 'status' in d:
                ebstatus = d['status']

    if datum is None:
        print('There is no EB datum. No reset is possible')
        return(0)

    if ebstatus != "":
        print('EB status: {0}'.format(ebstatus))
    else:
        print('EB status: not available')





    # Step zero (normally not needed): change the run status to "transferring"
    #    DB.db.find_one_and_update({'number':number},{'$set':{"status": "transferring"}})


    # First action: remove the files stored in datamanager
    files = list_file_replicas(number, dtype, hash, "LNGS_USERDISK")
    print("Deleting rucio data in datamanager disk. Deleting",len(files),"files")
    for file in files:
        os.remove(file)



    # Second action: remove the LNGS Rucio rule
    rucio_rule = rc.GetRule(upload_structure=did, rse='LNGS_USERDISK')
    if rucio_rule['exists']:
        print("Deleting rucio rule = ", rucio_rule['id'])
        rc.DeleteRule(rucio_rule['id'])
        # Wait for 1 hour (plus 5 minutes of margin)
        delay = 3600+60*5
        print("We have to wait for {0} seconds before proceeding to the next step".format(delay))
        time.sleep(delay)
    else:
        print("There is no rule to delete")



    # Third action: set the EB status to 'eb_ready_to_upload'
    DB.db.find_one_and_update({'_id': run['_id'],'data': {'$elemMatch': datum}},
                                   {'$set': {'data.$.status': 'eb_ready_to_upload'}})
    print("EB status changed to eb_ready_to_upload")



    # Reload the run
    run = DB.db.find_one({'number' : number})

    # Gets the status
    if 'status' in run:
        print('New run status: {0}'.format(run['status']))
    else:
        print('New run status: {0}'.format('Not available'))

    # Get the EB datum and its status
    ebstatus = ""
    datum = None
    for d in run['data']:
        if d['type'] == dtype and eb in d['host']:
            datum = d
            if 'status' in d:
                ebstatus = d['status']

    # Prints the EB status as confirmation of the change
    if ebstatus != "":
        print('New EB status: {0}'.format(ebstatus))
    else:
        print('New EB status: not available')
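
A hedged call example with a purely illustrative DID; keep in mind that reset_upload() deletes the local files and the LNGS rule, then sleeps for roughly an hour before marking the EB datum as eb_ready_to_upload:

# Illustrative only: long-running call that resets a stuck upload
reset_upload('xnt_007000:raw_records-rfzvpzj4mf')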