Example #1
    def insertProcessingEra(self):
        """
        API to insert A Processing Era in DBS

        :param procEraObj: Processing Era object
        :type procEraObj: dict
        :key processing_version: Processing Version (Required)
        :key description: Description (Optional)

        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy('processing_era', indata)
            indata.update({"creation_date": indata.get("creation_date", dbsUtils().getTime()), \
                           "create_by" : dbsUtils().getCreateBy() })
            self.dbsProcEra.insertProcessingEra(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler(
                "dbsException-invalid-input2",
                "Wrong format/data from insert ProcessingEra input",
                self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception,
                                de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = "DBSWriterModel/insertProcessingEra. %s\n. Exception trace: \n %s" \
                            % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',
                                dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)
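For illustration, a client payload for this API could be built as follows. This is a sketch: the values are invented, only the keys documented in the docstring are used, and the server decodes the resulting body with cjson.decode.

    # Hypothetical client-side sketch -- keys follow the docstring above,
    # values are invented for illustration.
    import json
    payload = {"processing_version": 9001,            # Required
               "description": "Test processing era"}  # Optional
    body = json.dumps(payload)  # sent as the HTTP request body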
Example #2
    def insertPrimaryDataset(self):
        """
        Inserts a Primary Dataset in DBS.
        Gets the input from the cherrypy request body.
        The input must be a dictionary with the following two keys:
        primary_ds_name, primary_ds_type
        """

        userDN = request.headers.get('Ssl-Client-S-Dn', None)
        access = request.headers.get('Ssl-Client-Verify', None)
        if userDN != '(null)' and access == 'SUCCESS':
            # Means that the user certificate was authenticated by the frontend
            self.logger.warning("<<<<<<<<<<<<<<<<<<<<<<<<<USER DN %s specified>>>>>>>>>>>>>>>>>>>>>>>" % userDN)
        else:
            self.logger.warning("<<<<<<<<<<<<<<<<<<<<<<<<<NO USER DN specified>>>>>>>>>>>>>>>>>>>>>>>")

        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata.update({"creation_date": dbsUtils().getTime(), "create_by": dbsUtils().getCreateBy()})
            self.dbsPrimaryDataset.insertPrimaryDataset(indata)
        except Exception as ex:
            raise Exception("DBS Server Exception: %s \n. Exception trace: \n %s " % (ex, traceback.format_exc()))
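The header check above can be read as a small predicate; a minimal standalone sketch (header names come from the code above, the example DN is invented):

    # Sketch of the frontend-authentication check used above.
    def frontend_authenticated(headers):
        userDN = headers.get('Ssl-Client-S-Dn', '(null)')
        access = headers.get('Ssl-Client-Verify', None)
        return userDN != '(null)' and access == 'SUCCESS'

    assert frontend_authenticated({'Ssl-Client-S-Dn': '/DC=ch/CN=Jane Doe',
                                   'Ssl-Client-Verify': 'SUCCESS'})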
Example #3
    def execute(self, conn, logical_file_name, is_file_valid, lost, transaction=False):
        """
        Update is_file_valid for a given file or a list of files; for lost
        files, file_size is reset to 0 as well.
        """
        if not conn:
            dbsExceptionHandler("dbsException-db-conn-failed",
                                "Oracle/File/UpdateStatus. Expects db connection from upper layer.")

        binds = dict(myuser=dbsUtils().getCreateBy(),
                     mydate=dbsUtils().getTime(),
                     is_file_valid=is_file_valid)

        if isinstance(logical_file_name, list):
            lfn_generator, lfn_binds = create_token_generator(logical_file_name)
            ### The WITH clause (subquery factoring) only works with SELECT statements, therefore the
            ### lfn_generator has to be placed in front of the SELECT statement in the WHERE clause:
            ### http://asktom.oracle.com/pls/asktom/f?p=100:11:::::P11_QUESTION_ID:8120272301765
            wheresql = """WHERE F.LOGICAL_FILE_NAME in ({lfn_generator} SELECT TOKEN FROM TOKEN_GENERATOR)
            """.format(lfn_generator=lfn_generator)
            binds.update(lfn_binds)
        else:
            wheresql = "where F.LOGICAL_FILE_NAME=:logical_file_name"
            binds.update(logical_file_name=logical_file_name)

        if lost:
            sql = "{sql}, file_size=0 {wheresql}".format(sql=self.sql,
                                                         wheresql=wheresql)
        else:
            sql = "{sql} {wheresql}".format(sql=self.sql,
                                            wheresql=wheresql)

        self.dbi.processData(sql, binds, conn, transaction)
Example #4
    def insertPrimaryDataset(self):
        """
        API to insert A primary dataset in DBS

        :param primaryDSObj: primary dataset object
        :type primaryDSObj: dict
        :key primary_ds_type: TYPE (out of valid types in DBS, MC, DATA) (Required)
        :key primary_ds_name: Name of the primary dataset (Required)

        """
        try :
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy("primds",indata)
            indata.update({"creation_date": dbsUtils().getTime(), "create_by": dbsUtils().getCreateBy() })
            self.dbsPrimaryDataset.insertPrimaryDataset(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert PrimaryDataset input",  self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = "DBSWriterModel/insertPrimaryDataset. %s\n Exception trace: \n %s" \
                        % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',  dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
Example #5
    def insertBlock(self):
        """
        Gets the input from the cherrypy request body.
        The input must be a dictionary with the following keys:
        KEYS: required/optional : default = ...
        ...
        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            # Proper validation needed
            vblock = re.match(r"(/[\w\d_-]+/[\w\d_-]+/[\w\d_-]+)#([\w\d_-]+)$",
                              indata["block_name"])
            assert vblock, "Invalid block name %s" % indata["block_name"]
            block = {}
            block.update({
                "dataset": vblock.groups()[0],
                "creation_date": indata.get("creation_date", dbsUtils().getTime()),
                "create_by": indata.get("create_by", dbsUtils().getCreateBy()),
                "last_modification_date": dbsUtils().getTime(),
                "last_modified_by": dbsUtils().getCreateBy(),
                "block_name": indata["block_name"],
                "file_count": indata.get("file_count", 0),
                "block_size": indata.get("block_size", 0),
                "origin_site_name": indata.get("origin_site_name"),
                "open_for_writing": indata.get("open_for_writing", 1),
            })
            self.dbsBlock.insertBlock(block)
        except Exception as ex:
            raise Exception("DBS Server Exception: %s \n. Exception trace: \n %s " % (ex, traceback.format_exc()))
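The block-name validation above is a single regular expression; a standalone sketch of what it accepts (the example names are invented):

    import re

    # Same pattern as in insertBlock: /primary/processed/tier#suffix
    BLOCK_RE = re.compile(r"(/[\w\d_-]+/[\w\d_-]+/[\w\d_-]+)#([\w\d_-]+)$")

    m = BLOCK_RE.match("/PrimaryDS/ProcessedDS-v1/TIER#abc-123")
    assert m and m.groups()[0] == "/PrimaryDS/ProcessedDS-v1/TIER"
    assert BLOCK_RE.match("/only/two#parts") is None  # needs three path segments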
Example #6
    def insertDataTier(self):
        """
        API to insert A Data Tier in DBS

        :param dataTierObj: Data Tier object
        :type dataTierObj: dict
        :key data_tier_name: Data Tier that needs to be inserted

        """
        try:
            conn = self.dbi.connection()
            tran = conn.begin()

            body = request.body.read()
            indata = cjson.decode(body)

            indata = validateJSONInputNoCopy("dataTier", indata)

            indata.update({"creation_date": indata.get("creation_date", dbsUtils().getTime()), \
                           "create_by" : dbsUtils().getCreateBy()})

            indata['data_tier_id'] = self.sequenceManagerDAO.increment(
                conn, "SEQ_DT", tran)
            try:
                indata['data_tier_name'] = indata['data_tier_name'].upper()
            except KeyError as ke:
                dbsExceptionHandler(
                    "dbsException-invalid-input",
                    "DBSWriterModel/insertDataTier. \
                    data_tier_name is required.")
            self.dbsDataTierInsertDAO.execute(conn, indata, tran)
            if tran: tran.commit()
        except cjson.DecodeError as dc:
            dbsExceptionHandler(
                "dbsException-invalid-input2",
                "Wrong format/data from insert DataTier input",
                self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception,
                                de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            if str(ex).lower().find("unique constraint") != -1 or str(
                    ex).lower().find("duplicate") != -1:
                # already exist
                self.logger.warning(
                    "Unique constraint violation being ignored...")
                self.logger.warning("%s" % ex)
                pass
            else:
                sError = " DBSWriterModel\insertDataTier. %s\n. Exception trace: \n %s" % (
                    ex, traceback.format_exc())
                dbsExceptionHandler(
                    'dbsException-server-error',
                    dbsExceptionCode['dbsException-server-error'],
                    self.logger.exception, sError)
        finally:
            if tran: tran.rollback()
            if conn: conn.close()
Example #7
    def insertPrimaryDataset(self):
        """
        API to insert A primary dataset in DBS

        :param primaryDSObj: primary dataset object
        :type primaryDSObj: dict
        :key primary_ds_type: TYPE (out of valid types in DBS, MC, DATA) (Required)
        :key primary_ds_name: Name of the primary dataset (Required)

        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy("primds", indata)
            indata.update({
                "creation_date": dbsUtils().getTime(),
                "create_by": dbsUtils().getCreateBy()
            })
            self.dbsPrimaryDataset.insertPrimaryDataset(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler(
                "dbsException-invalid-input2",
                "Wrong format/data from insert PrimaryDataset input",
                self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception,
                                de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = "DBSWriterModel/insertPrimaryDataset. %s\n Exception trace: \n %s" \
                        % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',
                                dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)
Example #8
    def execute(self, conn, logical_file_name, is_file_valid, lost, dataset, transaction=False):
        """
        Update is_file_valid for a given file, a list of files, or all files in a dataset; for lost files, file_size is reset to 0 as well.
        """
        binds = dict(myuser=dbsUtils().getCreateBy(),
                     mydate=dbsUtils().getTime(),
                     is_file_valid=is_file_valid)

        if logical_file_name and isinstance(logical_file_name, list):
            lfn_generator, lfn_binds = create_token_generator(logical_file_name)
            ### The WITH clause (subquery factoring) only works with SELECT statements, therefore the
            ### lfn_generator has to be placed in front of the SELECT statement in the WHERE clause:
            ### http://asktom.oracle.com/pls/asktom/f?p=100:11:::::P11_QUESTION_ID:8120272301765
            wheresql = """WHERE F.LOGICAL_FILE_NAME in ({lfn_generator} SELECT TOKEN FROM TOKEN_GENERATOR)
            """.format(lfn_generator=lfn_generator)
            binds.update(lfn_binds)
        elif logical_file_name:
            wheresql = "where F.LOGICAL_FILE_NAME=:logical_file_name"
            binds.update(logical_file_name=logical_file_name)
        elif dataset:
            wheresql = """ where F.dataset_id in ( select D.dataset_id from {owner}DATASETS D 
                           inner join {owner}FILES F2 on F2.dataset_id = D.dataset_id 
                           Where D.dataset=:dataset) """.format(owner=self.owner)
            binds.update(dataset=dataset)

        if lost:
            sql = "{sql}, file_size=0 {wheresql}".format(sql=self.sql,
                                                         wheresql=wheresql)
        else:
            sql = "{sql} {wheresql}".format(sql=self.sql,
                                            wheresql=wheresql)

        self.dbi.processData(sql, binds, conn, transaction)
Example #9
    def updateMigrationBlockStatus(self, migration_status=0, migration_block=None, migration_request=None):
        """
        migration_status:
        0=PENDING
        1=IN PROGRESS
        2=COMPLETED
        3=FAILED (will be retried)
        9=Terminally FAILED
        status changes:
        0 -> 1
        1 -> 2
        1 -> 3
        1 -> 9
        are the only allowed changes while working through a migration.
        3 -> 1 is allowed for retrying.

        """

        conn = self.dbi.connection()
        tran = conn.begin()
        try:
            if migration_block:
                upst = dict(migration_status=migration_status,
                        migration_block_id=migration_block, last_modification_date=dbsUtils().getTime())
            elif migration_request:
                upst = dict(migration_status=migration_status, migration_request_id=migration_request,
                            last_modification_date=dbsUtils().getTime())
            self.mgrup.execute(conn, upst)
        except:
            if tran: tran.rollback()
            raise
        else:
            if tran: tran.commit()
        finally:
            if conn: conn.close()
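The transition rules in the docstring can be expressed as a small table; a sketch (not part of the DBS code) that a caller could use to check a change before issuing the update:

    # Allowed migration-status transitions, per the docstring above (sketch only).
    ALLOWED_TRANSITIONS = {0: {1}, 1: {2, 3, 9}, 3: {1}}

    def transition_allowed(old_status, new_status):
        return new_status in ALLOWED_TRANSITIONS.get(old_status, set())

    assert transition_allowed(1, 9)      # IN PROGRESS -> terminally FAILED
    assert not transition_allowed(2, 1)  # COMPLETED is final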
Example #10
    def insertProcessingEra(self):
        """
        API to insert A Processing Era in DBS

        :param procEraObj: Processing Era object
        :type procEraObj: dict
        :key processing_version: Processing Version (Required)
        :key description: Description (Optional)

        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy('processing_era', indata)
            indata.update({"creation_date": indata.get("creation_date", dbsUtils().getTime()), \
                           "create_by" : dbsUtils().getCreateBy() })
            self.dbsProcEra.insertProcessingEra(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert ProcessingEra input",  self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = "DBSWriterModel/insertProcessingEra. %s\n. Exception trace: \n %s" \
                            % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',  dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
Example #11
    def submit(self):
        """
        Interface for submitting a migration request.
        Required input keys:
        MIGRATION_URL: The source DBS url for migration.
        MIGRATION_INPUT: The block or dataset names to be migrated.
        """
        body = request.body.read()
        indata = cjson.decode(body)
        try:
            indata = validateJSONInputNoCopy("migration_rqst", indata)
            indata.update({"creation_date": dbsUtils().getTime(),
                "last_modification_date" : dbsUtils().getTime(),
                "create_by" : dbsUtils().getCreateBy() ,
                "last_modified_by" : dbsUtils().getCreateBy(),
                "migration_status": 0})
            return self.dbsMigrate.insertMigrationRequest(indata)
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
        except Exception as ex:
            sError = "DBSMigrateModle/submit. %s\n Exception trace: \n %s." \
                     % (ex, traceback.format_exc() )
            if hasattr(ex, 'status') and ex.status == 400:
		dbsExceptionHandler('dbsException-invalid-input2', str(ex), self.logger.exception, sError)
	    else:	
		dbsExceptionHandler('dbsException-server-error',  str(ex), self.logger.exception, sError)
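A request body for this interface might look like the following sketch; key casing follows the docstring above and both values are invented:

    import json
    payload = {"MIGRATION_URL": "https://dbs.example.org/dbs/prod/global/DBSReader",
               "MIGRATION_INPUT": "/PrimaryDS/ProcessedDS/TIER#abc-123"}
    body = json.dumps(payload)  # decoded server-side with cjson.decode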
Example #12
    def insertAcquisitionEra(self):
        """
        API to insert an Acquisition Era in DBS

        :param acqEraObj: Acquisition Era object
        :type acqEraObj: dict
        :key acquisition_era_name: Acquisition Era Name (Required)
        :key start_date: start date of the acquisition era (unixtime, int) (Optional, default current date)
        :key end_date: end date of the acquisition era (unixtime, int) (Optional)

        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy("acquisition_era",indata)
            indata.update({"start_date": indata.get("start_date", dbsUtils().getTime()),\
                           "creation_date": indata.get("creation_date", dbsUtils().getTime()), \
                           "create_by" : dbsUtils().getCreateBy() })
            self.dbsAcqEra.insertAcquisitionEra(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert AcquisitionEra input",  self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = " DBSWriterModel/insertAcquisitionEra. %s\n. Exception trace: \n %s" \
                        % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',  dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
Example #13
    def insertOutputConfig(self):
        """
        API to insert An OutputConfig in DBS

        :param outputConfigObj: Output Config object
        :type outputConfigObj: dict
        :key app_name: App Name (Required)
        :key release_version: Release Version (Required)
        :key pset_hash: Pset Hash (Required)
        :key output_module_label: Output Module Label (Required)
        :key global_tag: Global Tag (Required)
        :key scenario: Scenario (Optional, default is None)
        :key pset_name: Pset Name (Optional, default is None)

        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy("dataset_conf_list",indata)
            indata.update({"creation_date": dbsUtils().getTime(),
                           "create_by" : dbsUtils().getCreateBy()})
            self.dbsOutputConfig.insertOutputConfig(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert OutputConfig input",  self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = "DBSWriterModel/insertOutputConfig. %s\n. Exception trace: \n %s" \
                            % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',  dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
Example #14
    def submit(self):
        """
        Interface for submitting a migration request.
        Required input keys:
        MIGRATION_URL: The source DBS url for migration.
        MIGRATION_INPUT: The block or dataset names to be migrated.
        """
        body = request.body.read()
        indata = cjson.decode(body)
        try:
            indata = validateJSONInputNoCopy("migration_rqst", indata)
            indata.update({
                "creation_date": dbsUtils().getTime(),
                "last_modification_date": dbsUtils().getTime(),
                "create_by": dbsUtils().getCreateBy(),
                "last_modified_by": dbsUtils().getCreateBy(),
                "migration_status": 0
            })
            return self.dbsMigrate.insertMigrationRequest(indata)
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception,
                                de.serverError)
        except Exception as ex:
            sError = "DBSMigrateModel/submit. %s\n Exception trace: \n %s." \
                     % (ex, traceback.format_exc())
            if hasattr(ex, 'status') and ex.status == 400:
                dbsExceptionHandler('dbsException-invalid-input2', str(ex),
                                    self.logger.exception, sError)
            else:
                dbsExceptionHandler('dbsException-server-error', str(ex),
                                    self.logger.exception, sError)
Example #15
    def insertAcquisitionEra(self):
        """
        API to insert an Acquisition Era in DBS

        :param acqEraObj: Acquisition Era object
        :type acqEraObj: dict
        :key acquisition_era_name: Acquisition Era Name (Required)
        :key start_date: start date of the acquisition era (unixtime, int) (Optional, default current date)
        :key end_date: end date of the acquisition era (unixtime, int) (Optional)

        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy("acquisition_era", indata)
            indata.update({"start_date": indata.get("start_date", dbsUtils().getTime()),\
                           "creation_date": indata.get("creation_date", dbsUtils().getTime()), \
                           "create_by" : dbsUtils().getCreateBy() })
            self.dbsAcqEra.insertAcquisitionEra(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler(
                "dbsException-invalid-input2",
                "Wrong format/data from insert AcquisitionEra input",
                self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception,
                                de.serverError)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = " DBSWriterModel/insertAcquisitionEra. %s\n. Exception trace: \n %s" \
                        % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',
                                dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)
Example #16
    def insertFile(self, qInserts=False):
        """
        API to insert a list of files into DBS. Up to 10 files can be inserted in one request.

        :param qInserts: True means that inserts will be queued instead of done immediately. The INSERT QUEUE Manager will perform the inserts within a few minutes.
        :type qInserts: bool
        :param filesList: List of dictionaries containing following information
        :type filesList: list of dicts
        :key logical_file_name: File to be inserted (str) (Required)
        :key is_file_valid: (optional, default = 1): (bool)
        :key block: required: /a/b/c#d (str)
        :key dataset: required: /a/b/c (str)
        :key file_type: (optional, default = EDM) one of the predefined types, (str)
        :key check_sum: (optional, default = '-1') (str)
        :key event_count: (optional, default = -1) (int)
        :key file_size: (optional, default = -1.) (float)
        :key adler32: (optional, default = '') (str)
        :key md5: (optional, default = '') (str)
        :key auto_cross_section: (optional, default = -1.) (float)
        :key file_lumi_list: (optional, default = []) [{'run_num': 123, 'lumi_section_num': 12},{}....]
        :key file_parent_list: (optional, default = []) [{'file_parent_lfn': 'mylfn'},{}....]
        :key file_assoc_list: (optional, default = []) [{'file_parent_lfn': 'mylfn'},{}....]
        :key file_output_config_list: (optional, default = []) [{'app_name':..., 'release_version':..., 'pset_hash':...., output_module_label':...},{}.....]

        """
        if qInserts in (False, 'False'): qInserts=False
        try:
            body = request.body.read()
            indata = cjson.decode(body)["files"]
            if not isinstance(indata, (list,dict)):
                 dbsExceptionHandler("dbsException-invalid-input", "Invalid Input DataType", self.logger.exception, \
                                      "insertFile expects input as list or dirc")
            businput = []
            if type(indata) == dict:
                indata = [indata]
            indata = validateJSONInputNoCopy("files",indata)
            for f in indata:
                f.update({
                     #"dataset":f["dataset"],
                     "creation_date": f.get("creation_date", dbsUtils().getTime()),
                     "create_by" : dbsUtils().getCreateBy(),
                     "last_modification_date": f.get("last_modification_date", dbsUtils().getTime()),
                     "last_modified_by": f.get("last_modified_by" , dbsUtils().getCreateBy()),
                     "file_lumi_list":f.get("file_lumi_list",[]),
                     "file_parent_list":f.get("file_parent_list",[]),
                     "file_assoc_list":f.get("assoc_list",[]),
                     "file_output_config_list":f.get("file_output_config_list",[])})
                businput.append(f)
            self.dbsFile.insertFile(businput, qInserts)
        except cjson.DecodeError as dc:
            dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert File input",  self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = "DBSWriterModel/insertFile. %s\n. Exception trace: \n %s" \
                    % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',  dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
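Putting the documented keys together, one element of the 'files' list could look like the sketch below (all values invented; only keys from the docstring are used):

    example_file = {
        "logical_file_name": "/store/data/era/PrimaryDS/TIER/v1/000/abc.root",  # Required
        "block": "/PrimaryDS/ProcessedDS/TIER#abc-123",                         # Required
        "dataset": "/PrimaryDS/ProcessedDS/TIER",                               # Required
        "file_type": "EDM",
        "file_size": 2048,
        "event_count": 100,
        "adler32": "deadbeef",
        "file_lumi_list": [{"run_num": 123, "lumi_section_num": 12}],
    }
    request_body = {"files": [example_file]}  # up to 10 such dicts per request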
Example #17
    def handleMigration(self):
        """
        The actual handle method for performing migration

        * The method takes a request and tries to complete it to the end, so that
        * we do not have incomplete migrations running forever

        1. Get a migration request in PENDING status and change its status to RUNNING
        2. Get the highest order PENDING block of the request
        3. Change the status of the block to RUNNING
        4. Migrate it
        5. Change the block status to COMPLETED (?remove from the list?)
        6. Pick the next block, go to 3.
        7. After no more blocks can be migrated, mark the request as DONE (move to history table!)

        A migration request has four statuses: 0=pending, 1=running, 3=completed and 4=failed.
        A migration block has three statuses: 0=pending, 3=completed and 4=failed.
        """
        
        request={}
        requestID=-1
        try :
            #1 get a migration request in 0 (PENDING) STATUS & Change its status to 1 (RUNNING)
            print "get a migration request in 0 (PENDING) STATUS & Change its status to 1 (RUNNING)"
            conn = self.dbi.connection()
            #connx is for sequence table only.
            connx = self.dbi.connection()
            request = self.getMigrationRequest(conn)
            print "request = %s" %request
            if not request:
                return 
            
            #2 find the highest order pending block                    
            print "find the highest order pending block"
            requestID=request["migration_request_id"]
            blocks = self.fmb.execute(conn, requestID)
            for ablock in blocks:
                print "migrate block by block!"
                self.migrateBlock(conn, connx, ablock['migration_block_name'], request["migration_url"])
            #Finally mark the request as 3=Completed
            print "Finally mark the request as 3=Completed"
            tran = conn.begin()
            self.urs.execute(conn, requestID, 3, self.threadID, dbsUtils().getTime(), transaction=tran)
            tran.commit();
        except Exception as ex:
            status = 0
            if "Source Server is down !" in str(ex):
                self.logger.exception("DBS Migration Service failed to perform migration %s due to the source server is down" %str(ex))
            else:
                self.logger.exception("DBS Migration Service failed to perform migration %s" %str(ex))
                #FAILED=4
                status = 4
            tran = conn.begin()
            self.urs.execute(conn, requestID, status, self.threadID, dbsUtils().getTime(), transaction=tran)
            tran.commit()
            raise
Example #18
    def submit(self):
        """
        Interface for submitting a migration request
        """
        body = request.body.read()
        indata = cjson.decode(body)
        indata.update({"creation_date": dbsUtils().getTime(),
                       "last_modification_date": dbsUtils().getTime(),
                       "create_by": dbsUtils().getCreateBy(),
                       "last_modified_by": dbsUtils().getCreateBy()})
        return self.dbsMigrate.insertMigrationRequest(indata)
Example #19
    def insertBlock(self, businput):
        """
        The input dictionary must have the following keys:
        block_name, origin_site_name

        It may also have:
        open_for_writing, block_size, file_count,
        creation_date, create_by, last_modification_date, last_modified_by

        It builds the correct dictionary for the dao input and executes the dao.

        NEED to validate that there are no extra keys in the businput
        """
        if not ("block_name" in businput and "origin_site_name" in businput  ):
            dbsExceptionHandler('dbsException-invalid-input', "business/DBSBlock/insertBlock must have block_name and origin_site_name as input")
        conn = self.dbi.connection()
        tran = conn.begin()
        try:
            blkinput = {
                "last_modification_date":businput.get("last_modification_date",  dbsUtils().getTime()),
                #"last_modified_by":businput.get("last_modified_by", dbsUtils().getCreateBy()),
                "last_modified_by":dbsUtils().getCreateBy(),
                #"create_by":businput.get("create_by", dbsUtils().getCreateBy()),
                "create_by":dbsUtils().getCreateBy(),
                "creation_date":businput.get("creation_date", dbsUtils().getTime()),
                "open_for_writing":businput.get("open_for_writing", 1),
                "block_size":businput.get("block_size", 0),
                "file_count":businput.get("file_count", 0),
                "block_name":businput.get("block_name"),
                "origin_site_name":businput.get("origin_site_name")
            }
            ds_name = businput["block_name"].split('#')[0]
            blkinput["dataset_id"] = self.datasetid.execute(conn,  ds_name, tran)
            if blkinput["dataset_id"] == -1 : 
                msg = "DBSBlock/insertBlock. Dataset %s does not exists" % ds_name
                dbsExceptionHandler('dbsException-missing-data', msg)
            blkinput["block_id"] =  self.sm.increment(conn, "SEQ_BK", tran)
            self.blockin.execute(conn, blkinput, tran)

            tran.commit()
            tran = None
        except Exception as e:
            if str(e).lower().find("unique constraint") != -1 or str(e).lower().find("duplicate") != -1:
                pass
            else:
                if tran:
                    tran.rollback()
                if conn: conn.close()
                raise
                
        finally:
            if tran:
                tran.rollback()
            if conn:
                conn.close()
Example #20
    def insertDataTier(self):
        """
        Inserts a data tier in DBS
        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata.update({"creation_date": dbsUtils().getTime(), "create_by": dbsUtils().getCreateBy()})
            self.dbsDataTier.insertDataTier(indata)
        except Exception as ex:
            raise Exception("DBS Server Exception: %s \n. Exception trace: \n %s " % (ex, traceback.format_exc()))
Example #21
    def execute(self, conn, block_name, origin_site_name, transaction=False):
        """
        Update origin_site_name for a given block_name
        """
        if not conn:
            dbsExceptionHandler(
                "dbsException-db-conn-failed", "Oracle/Block/UpdateStatus. \
Expects db connection from upper layer.")
        binds = {
            "block_name": block_name,
            "origin_site_name": origin_site_name,
            "mtime": dbsUtils().getTime(),
            "myuser": dbsUtils().getCreateBy()
        }
        self.dbi.processData(self.sql, binds, conn, transaction)
Example #22
    def insertProcessingEra(self):
        """
        Inserts a ProcessingEra in DBS.
        Gets the input from the cherrypy request body.
        The input must be a dictionary with at least the following keys:
        'processing_version', 'description'
        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata.update({"creation_date": dbsUtils().getTime(), "create_by": dbsUtils().getCreateBy()})
            self.dbsProcEra.insertProcessingEra(indata)
        except Exception as ex:
            raise Exception("DBS Server Exception: %s \n. Exception trace: \n %s " % (ex, traceback.format_exc()))
Example #23
    def execute(self, conn, daoinput, transaction=False):
        """
	    daoinput keys:
	    migration_request_id
        """
        if not conn:
            dbsExceptionHandler(
                "dbsException-db-conn-failed",
                "Oracle/MigrationRequests/Remove. Expects db connection from upper layer."
            )
        daoinput['create_by'] = dbsUtils().getCreateBy()
        try:
            msg = "DBSMigration: Invalid request. Sucessfully processed or processing requests cannot be removed,\
                    or the requested migration did not exist, or the requestor for removing and creating has to be the same user. "

            checkit = self.dbi.processData(self.select, daoinput, conn,
                                           transaction)
            if self.formatDict(checkit)[0]["count"] >= 1:
                reqID = {'migration_rqst_id': daoinput['migration_rqst_id']}
                result = self.dbi.processData(self.sql, reqID, conn,
                                              transaction)
            else:
                dbsExceptionHandler('dbsException-invalid-input', msg)
        except:
            raise
Example #24
    def getMigrationRequest(self, conn):
        """
        Find a pending request from the queued requests (in database) and update its status to 1 (running)
        --atomic operation
        """
        request=[]
        try:
            tran = conn.begin()
            #get the pending request(status=0)
            request = self.fpr.execute(conn, tran)
            if len(request) <= 0:
                # no pending request found, goodbye
                tran.rollback()
                return {}
            else:
                requestID = request[0]["migration_request_id"]
                migration_status = 1
                # update the request to 1 (running)
                self.urs.execute(conn, requestID, 1, self.threadID, dbsUtils().getTime(), tran)
                tran.commit()
                return request[0]
        except Exception as ex:
            self.logger.exception("DBS Migrate Service failed to find migration requests")
            if tran:
                tran.rollback()
            raise Exception("DBS Migrate Service failed to find migration requests: %s" % ex)
Example #25
    def updateMigrationRequestStatus(self, migration_status,
                                     migration_request_id):
        """
        migration_status:
        0=PENDING
        1=IN PROGRESS
        2=COMPLETED
        3=FAILED (will be retried)
        9=Terminally FAILED 
        status changes:
        0 -> 1
        1 -> 2
        1 -> 3
        1 -> 9
        are the only allowed changes while working through a migration.
        3 -> 1 is allowed for retrying, incrementing the retry count by 1.

        """

        conn = self.dbi.connection()
        tran = conn.begin()
        try:
            upst = dict(migration_status=migration_status,
                        migration_request_id=migration_request_id,
                        last_modification_date=dbsUtils().getTime())
            self.mgrRqUp.execute(conn, upst)
        except:
            if tran: tran.rollback()
            raise
        else:
            if tran: tran.commit()
        finally:
            #open transaction is committed when conn closed.
            if conn: conn.close()
Example #26
    def updateMigrationRequestStatus(self, migration_status, migration_request_id):
        """
        migration_status:
        0=PENDING
        1=IN PROGRESS
        2=COMPLETED
        3=FAILED (will be retried)
        9=Terminally FAILED 
        status changes:
        0 -> 1
        1 -> 2
        1 -> 3
        1 -> 9
        are the only allowed changes while working through a migration.
        3 -> 1 is allowed for retrying, incrementing the retry count by 1.

        """

        conn = self.dbi.connection()
        tran = conn.begin()
        try:
            upst = dict(migration_status=migration_status,
                        migration_request_id=migration_request_id,
                        last_modification_date=dbsUtils().getTime())
            self.mgrRqUp.execute(conn, upst)
        except:
            if tran: tran.rollback()
            raise
        else:
            if tran: tran.commit()
        finally:
            #open transaction is committed when conn closed.
            if conn: conn.close()
Example #27
    def execute(self, conn, dataset, dataset_access_type, transaction=False):
        """
        Update dataset_access_type for a given dataset
        """
        if not conn:
            dbsExceptionHandler(
                "dbsException-failed-connect2host",
                "Oracle/Dataset/UpdateType. Expects db connection from upper layer.",
                self.logger.exception)
        binds = {
            "dataset": dataset,
            "dataset_access_type": dataset_access_type,
            "myuser": dbsUtils().getCreateBy(),
            "mydate": dbsUtils().getTime()
        }
        result = self.dbi.processData(self.sql, binds, conn, transaction)
Example #28
    def insertDataTier(self):
        """
        API to insert A Data Tier in DBS

        :param dataTierObj: Data Tier object
        :type dataTierObj: dict
        :key data_tier_name: Data Tier that needs to be inserted

        """
        try:
            conn = self.dbi.connection()
            tran = conn.begin()

            body = request.body.read()
            indata = cjson.decode(body)

            indata = validateJSONInputNoCopy("dataTier", indata)

            indata.update({"creation_date": indata.get("creation_date", dbsUtils().getTime()), \
                           "create_by" : dbsUtils().getCreateBy()})

            indata['data_tier_id'] = self.sequenceManagerDAO.increment(conn, "SEQ_DT", tran)
            try:
                indata['data_tier_name'] = indata['data_tier_name'].upper()
            except KeyError as ke:
                dbsExceptionHandler("dbsException-invalid-input", "DBSWriterModel/insertDataTier. \
                    data_tier_name is required.")
            self.dbsDataTierInsertDAO.execute(conn, indata, tran)
            if tran: tran.commit()
        except cjson.DecodeError as dc:
            dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert DataTier input",  self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            if str(ex).lower().find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
                # already exist
                self.logger.warning("Unique constraint violation being ignored...")
                self.logger.warning("%s" % ex)
                pass
            else:
                sError = " DBSWriterModel\insertDataTier. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
                dbsExceptionHandler('dbsException-server-error',  dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
        finally:
            if tran: tran.rollback()
            if conn: conn.close()
Example #29
    def insertDataset(self):
        """
        API to insert a dataset in DBS

        :param datasetObj: Dataset object
        :type datasetObj: dict
        :key primary_ds_name: Primary Dataset Name (Required)
        :key dataset: Name of the dataset (Required)
        :key dataset_access_type: Dataset Access Type (Required)
        :key processed_ds_name: Processed Dataset Name (Required)
        :key data_tier_name: Data Tier Name (Required)
        :key acquisition_era_name: Acquisition Era Name (Required)
        :key processing_version: Processing Version (Required)
        :key physics_group_name: Physics Group Name (Optional, default None)
        :key prep_id: ID of the Production and Reprocessing management tool (Optional, default None)
        :key xtcrosssection: Xtcrosssection (Optional, default None)
        :key output_configs: List(dict) with keys release_version, pset_hash, app_name, output_module_label and global tag

        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy('dataset', indata)
            indata.update({
                "creation_date": dbsUtils().getTime(),
                "last_modification_date": dbsUtils().getTime(),
                "create_by": dbsUtils().getCreateBy(),
                "last_modified_by": dbsUtils().getCreateBy()
            })

            # need proper validation
            self.dbsDataset.insertDataset(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler("dbsException-invalid-input2",
                                "Wrong format/data from insert dataset input",
                                self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception,
                                de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = " DBSWriterModel/insertDataset. %s\n. Exception trace: \n %s" \
                        % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',
                                dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)
Example #30
    def execute(self,
                conn,
                migration_url="",
                migration_input="",
                create_by="",
                migration_request_id="",
                oldest=False,
                transaction=False):
        """
        Lists all requests if pattern is not provided.
        """
        sql = self.sql
        binds = {}
        if migration_request_id:
            sql += " WHERE MR.MIGRATION_REQUEST_ID=:migration_request_id"
            binds['migration_request_id'] = migration_request_id
        elif oldest:
            #FIXME: Need to write the sql.YG
            #current_date = dbsUtils().getTime()
            #we require a waiting time of:
            #retry_count=0: 1 minute
            #retry_count=1: 2 minutes
            #retry_count=2: 4 minutes

            sql += """
                       WHERE MR.MIGRATION_STATUS=0 
                       or (MR.migration_status=3 and MR.retry_count=0 and MR.last_modification_date <= :current_date-60)    
                       or (MR.migration_status=3 and MR.retry_count=1 and MR.last_modification_date <= :current_date-120)  
                       or (MR.migration_status=3 and MR.retry_count=2 and MR.last_modification_date <= :current_date-240)
                       ORDER BY MR.creation_date
                   """
            binds['current_date'] = dbsUtils().getTime()
            #print "time= " + str(binds['current_date'])
        else:
            if migration_url or migration_input or create_by:
                sql += " WHERE "
            if migration_url:
                sql += " MR.MIGRATION_URL=:migration_url"
                binds['migration_url'] = migration_url
            if migration_input:
                if migration_url:
                    sql += " AND "
                op = ("=", "like")["%" in migration_input]
                sql += " MR.MIGRATION_INPUT %s :migration_input" % op
                binds['migration_input'] = migration_input
            if create_by:
                if migration_url or migration_input:
                    sql += " AND "
                sql += " MR.CREATE_BY=:create_by" % create_by
                binds['create_by'] = create_by
        cursors = self.dbi.processData(sql,
                                       binds,
                                       conn,
                                       transaction,
                                       returnCursor=True)
        result = []
        for c in cursors:
            result.extend(self.formatCursor(c))
        return result
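The oldest branch above encodes an exponential retry backoff: a request in status 3 becomes eligible again 1, 2, or 4 minutes after its last modification, depending on retry_count. The same schedule as a one-liner:

    # Backoff in seconds for retry_count 0..2, matching the SQL above.
    backoff = {rc: 60 * 2 ** rc for rc in range(3)}  # {0: 60, 1: 120, 2: 240}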
Example #31
    def execute(self, conn, dataset, is_dataset_valid, transaction=False):
        """
        Update is_dataset_valid for a given dataset
        """
        if not conn:
            dbsExceptionHandler("dbsException-db-conn-failed", "Oracle/Dataset/UpdateStatus. Expects db connection from upper layer.")
        binds = {"dataset": dataset, "is_dataset_valid": is_dataset_valid, "mydate": dbsUtils().getTime(), "myuser": dbsUtils().getCreateBy()}
        result = self.dbi.processData(self.sql, binds, conn, transaction)
Example #32
    def insertFile(self, qInserts=True):
        """
        Gets the input from the cherrypy request body.
        The input must be a (list of) dictionary with the following keys:
        logical_file_name (required) : string
        is_file_valid: (optional, default = 1): 1/0
        block_name, required: /a/b/c#d
        dataset, required: /a/b/c
        file_type (optional, default = EDM): one of the predefined types,
        check_sum (optional, default = '-1'): string
        event_count (optional, default = -1): int
        file_size (optional, default = -1.): float
        adler32 (optional, default = ''): string
        md5 (optional, default = ''): string
        auto_cross_section (optional, default = -1.): float
        file_lumi_list (optional, default = []): [{"run_num": 123, "lumi_section_num": 12},{}....]
        file_parent_list (optional, default = []): [{"file_parent_lfn": "mylfn"},{}....]
        file_assoc_list (optional, default = []): [{"file_parent_lfn": "mylfn"},{}....]
        file_output_config_list (optional, default = []): [{"app_name":..., "release_version":..., "pset_hash":..., "output_module_label":...},{}.....]
        """
        if qInserts in (False, 'False'): qInserts = False
        try:
            body = request.body.read()
            indata = cjson.decode(body)["files"]

            # proper validation needed
            businput = []
            assert type(indata) in (list, dict)
            if type(indata) == dict:
                indata = [indata]
            for f in indata:
                f.update({
                    #"dataset":f["dataset"],
                    "creation_date": f.get("creation_date", dbsUtils().getTime()),
                    "create_by": f.get("create_by", dbsUtils().getCreateBy()),
                    "last_modification_date": f.get("last_modification_date", dbsUtils().getTime()),
                    "last_modified_by": f.get("last_modified_by", dbsUtils().getCreateBy()),
                    "file_lumi_list": f.get("file_lumi_list", []),
                    "file_parent_list": f.get("file_parent_list", []),
                    "file_assoc_list": f.get("assoc_list", []),
                    "file_output_config_list": f.get("file_output_config_list", [])})
                businput.append(f)
            self.dbsFile.insertFile(businput, qInserts)
        except Exception as ex:
            raise Exception("DBS Server Exception: %s \n. Exception trace: \n %s " % (ex, traceback.format_exc()))
Example #33
    def execute(self,
                conn,
                logical_file_name,
                is_file_valid,
                lost,
                dataset,
                transaction=False):
        """
        Update is_file_valid for a given file, a list of files, or all files in a dataset; for lost files, file_size is reset to 0 as well.
        """
        if not conn:
            dbsExceptionHandler(
                "dbsException-db-conn-failed",
                "Oracle/File/UpdateStatus. Expects db connection from upper layer."
            )

        binds = dict(myuser=dbsUtils().getCreateBy(),
                     mydate=dbsUtils().getTime(),
                     is_file_valid=is_file_valid)

        if logical_file_name and isinstance(logical_file_name, list):
            lfn_generator, lfn_binds = create_token_generator(
                logical_file_name)
            ### The WITH clause (subquery factoring) only works with SELECT statements, therefore the
            ### lfn_generator has to be placed in front of the SELECT statement in the WHERE clause:
            ### http://asktom.oracle.com/pls/asktom/f?p=100:11:::::P11_QUESTION_ID:8120272301765
            wheresql = """WHERE F.LOGICAL_FILE_NAME in ({lfn_generator} SELECT TOKEN FROM TOKEN_GENERATOR)
            """.format(lfn_generator=lfn_generator)
            binds.update(lfn_binds)
        elif logical_file_name:
            wheresql = "where F.LOGICAL_FILE_NAME=:logical_file_name"
            binds.update(logical_file_name=logical_file_name)
        elif dataset:
            wheresql = """ where F.dataset_id in ( select D.dataset_id from {owner}DATASETS D 
                           inner join {owner}FILES F2 on F2.dataset_id = D.dataset_id 
                           Where D.dataset=:dataset) """.format(
                owner=self.owner)
            binds.update(dataset=dataset)

        if lost:
            sql = "{sql}, file_size=0 {wheresql}".format(sql=self.sql,
                                                         wheresql=wheresql)
        else:
            sql = "{sql} {wheresql}".format(sql=self.sql, wheresql=wheresql)

        self.dbi.processData(sql, binds, conn, transaction)
Example #34
    def insertOutputConfig(self):
        """
        Insert an output configuration (formerly known as algorithm config) in DBS.
        Gets the input from the cherrypy request body.
        The input must be a dictionary with at least the following keys:
        app_name, release_version, pset_hash, output_module_label
        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata.update({"creation_date": dbsUtils().getTime(),
                           "create_by": dbsUtils().getCreateBy(), "last_modified_by": dbsUtils().getCreateBy()})
            # need proper validation
            self.dbsOutputConfig.insertOutputConfig(indata)
        except Exception as ex:
            raise Exception("DBS Server Exception: %s \n. Exception trace: \n %s " % (ex, traceback.format_exc()))
Example #35
    def execute(self, conn, block_name, open_for_writing, ltime, transaction=False):
        """
        Update open_for_writing for a given block
        """
        if not conn:
            dbsExceptionHandler("dbsException-db-conn-failed",
                                "Oracle/Block/UpdateStatus. Expects db connection from upper layer.")
        binds = {"block_name": block_name, "open_for_writing": open_for_writing, "ltime": ltime,
                 "myuser": dbsUtils().getCreateBy()}
        self.dbi.processData(self.sql, binds, conn, transaction)
Example #36
    def execute(self, conn, block_name, origin_site_name, transaction=False):
        """
        Update origin_site_name for a given block_name
        """
        if not conn:
            dbsExceptionHandler("dbsException-failed-connect2host", "Oracle/Block/UpdateStatus. \
Expects db connection from upper layer.", self.logger.exception)
        binds = {"block_name": block_name, "origin_site_name": origin_site_name, "mtime": dbsUtils().getTime(),
                 "myuser": dbsUtils().getCreateBy()}
        self.dbi.processData(self.sql, binds, conn, transaction)
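A usage sketch, with `dao` and `conn` as assumed names: the DAO above rewrites the origin site of a single block and stamps mtime/myuser from dbsUtils itself.

# Hypothetical caller -- `dao` is the origin-site UpdateStatus DAO above.
dao.execute(conn,
            block_name="/MinimumBias/Run2012A-v1/RECO#9f3a2c",
            origin_site_name="T1_US_FNAL_Disk")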
Beispiel #39
0
    def insertBlockFile(self, blockcontent, datasetId, migration=False):

        block = blockcontent['block']
        newBlock = False
        #Insert the block
        conn = self.dbi.connection()
        tran = conn.begin()
        self.logger.info("Inserted block name: %s" %block['block_name'])
        try:
            block['block_id'] = self.sm.increment(conn, "SEQ_BK",)
            block['dataset_id'] =  datasetId
            if not migration:
                block['creation_date'] = dbsUtils().getTime()
                block['create_by'] = dbsUtils().getCreateBy()
                block['last_modification_date'] = dbsUtils().getTime()
                block['last_modified_by'] = dbsUtils().getCreateBy()
            self.blockin.execute(conn, block, tran)
            newBlock = True
        except exceptions.IntegrityError as ex:
            if (str(ex).find("ORA-00001") != -1 and str(ex).find("TUC_BK_BLOCK_NAME") != -1) or str(ex).lower().find("duplicate") != -1:
                #not sure what happens to WMAgent: Does it try to insert a
                #block again? YG 10/05/2010
                #Talked with Matt N: We should stop inserting this block now.
                #This means there is some trouble.
                #Throw exception to let the upper layer know. YG 11/17/2010
                if tran: tran.rollback()
                if conn: conn.close()
                dbsExceptionHandler("dbsException-invalid-input2", "DBSBlockInsert/insertBlock. Block %s already exists." % (block['block_name']))
            elif str(ex).find("ORA-01400") > -1:
                if tran: tran.rollback()
                if conn: conn.close()
                dbsExceptionHandler('dbsException-missing-data',
                    'Missing data when inserting blocks. ',
                    self.logger.exception,
                    'Missing data when inserting blocks. ' + str(ex))
            else:
                if tran: tran.rollback()
                if conn: conn.close()
                dbsExceptionHandler('dbsException-invalid-input2',
                            'Invalid data when inserting blocks. ',
                            self.logger.exception,
                            'Invalid data when inserting blocks. ' + str(ex))
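A minimal `blockcontent` sketch for insertBlockFile, assembled from the fields the snippet reads; all values are illustrative.

# Illustrative input only -- field names taken from the code above.
blockcontent = {
    "block": {
        "block_name": "/MinimumBias/Run2012A-v1/RECO#9f3a2c",  # must be unique (TUC_BK_BLOCK_NAME)
        "open_for_writing": 1,
    }
    # file and file-parentage lists for the rest of the bulk insert would follow here
}
# insertBlockFile itself fills in block_id (from SEQ_BK), dataset_id and, unless
# migration=True, the creation/modification stamps before calling blockin.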
Beispiel #40
0
    def insertDataset(self):
        """
        gets the input from cherrypy request body.
        input must have the following keys:
        KEYS : required/optional:default = ...
        ...
        """

        try:
            body = request.body.read()
            indata = cjson.decode(body)

            indata.update({"creation_date": dbsUtils().getTime(), \
                           "last_modification_date" : dbsUtils().getTime(), \
                           "create_by" : dbsUtils().getCreateBy(), "last_modified_by" : dbsUtils().getCreateBy()})

            # need proper validation
            self.dbsDataset.insertDataset(indata)

        except Exception as ex:
            raise Exception("DBS Server Exception: %s \n. Exception trace: \n %s " % (ex, traceback.format_exc()))
Beispiel #41
0
    def execute(self, conn, daoinput, transaction=False):
        """
        daoinput keys:
        migration_request_id
        """
        if not conn:
            dbsExceptionHandler("dbsException-db-conn-failed", "Oracle/MigrationRequests/Remove. Expects db connection from upper layer.")
        daoinput['create_by'] = dbsUtils().getCreateBy()
        result = self.dbi.processData(self.sql, daoinput, conn, transaction)
        if result[0].rowcount == 0:
            dbsExceptionHandler('dbsException-invalid-input2', "DBSMigration: Invalid request. Successfully processed or processing requests cannot \
                                be removed, or the requested migration did not exist, or the requestor for removing and creating has to be the same user. ")
Beispiel #42
0
    def insertOutputConfig(self):
        """
        API to insert an OutputConfig in DBS

        :param outputConfigObj: Output Config object
        :type outputConfigObj: dict
        :key app_name: App Name (Required)
        :key release_version: Release Version (Required)
        :key pset_hash: Pset Hash (Required)
        :key output_module_label: Output Module Label (Required)
        :key global_tag: Global Tag (Required)
        :key scenario: Scenario (Optional, default is None)
        :key pset_name: Pset Name (Optional, default is None)

        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy("dataset_conf_list", indata)
            indata.update({
                "creation_date": dbsUtils().getTime(),
                "create_by": dbsUtils().getCreateBy()
            })
            self.dbsOutputConfig.insertOutputConfig(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler(
                "dbsException-invalid-input2",
                "Wrong format/data from insert OutputConfig input",
                self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception,
                                de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = "DBSWriterModel/insertOutputConfig. %s\n. Exception trace: \n %s" \
                            % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',
                                dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)
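A request-body sketch matching the keys documented above; the stdlib json module stands in for the server-side cjson, and all values are made up.

import json

payload = {
    "app_name": "cmsRun",                                  # required
    "release_version": "CMSSW_7_1_0",                      # required
    "pset_hash": "76e303993a1c2f842159dbfeeed9a0dd",       # required
    "output_module_label": "RECOoutput",                   # required
    "global_tag": "START71_V1::All",                       # required
    "scenario": None,                                      # optional
    "pset_name": None,                                     # optional
}
body = json.dumps(payload)   # what request.body.read() hands to cjson.decode()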
Beispiel #43
0
    def insertDataset(self):
        """
        API to insert a dataset in DBS

        :param datasetObj: Dataset object
        :type datasetObj: dict
        :key primary_ds_name: Primary Dataset Name (Required)
        :key dataset: Name of the dataset (Required)
        :key dataset_access_type: Dataset Access Type (Required)
        :key processed_ds_name: Processed Dataset Name (Required)
        :key data_tier_name: Data Tier Name (Required)
        :key acquisition_era_name: Acquisition Era Name (Required)
        :key processing_version: Processing Version (Required)
        :key physics_group_name: Physics Group Name (Optional, default None)
        :key prep_id: ID of the Production and Reprocessing management tool (Optional, default None)
        :key xtcrosssection: Xtcrosssection (Optional, default None)
        :key output_configs: List(dict) with keys release_version, pset_hash, app_name, output_module_label and global_tag

        """
        try:
            body = request.body.read()
            indata = cjson.decode(body)
            indata = validateJSONInputNoCopy('dataset', indata)
            indata.update({"creation_date": dbsUtils().getTime(),
                            "last_modification_date" : dbsUtils().getTime(),
                            "create_by" : dbsUtils().getCreateBy() ,
                            "last_modified_by" : dbsUtils().getCreateBy() })

            # need proper validation
            self.dbsDataset.insertDataset(indata)
        except cjson.DecodeError as dc:
            dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert dataset input",  self.logger.exception, str(dc)) 
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = "DBSWriterModel/insertDataset. %s\n. Exception trace: \n %s" \
                        % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
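A dataset payload sketch built from the documented keys. Note the convention dataset = /primary_ds_name/processed_ds_name/data_tier_name and the vVERSION suffix in processed_ds_name, which the business layer (Beispiel #50) enforces; values are illustrative.

import json

payload = {
    "primary_ds_name": "MinimumBias",
    "processed_ds_name": "Run2012A-v1",
    "data_tier_name": "RECO",
    "dataset": "/MinimumBias/Run2012A-v1/RECO",
    "dataset_access_type": "VALID",
    "acquisition_era_name": "Run2012A",
    "processing_version": 1,
    "physics_group_name": None,    # optional
    "prep_id": None,               # optional
    "xtcrosssection": None,        # optional
}
body = json.dumps(payload)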
Beispiel #44
0
    def addService(self):
        """
        Add a service to service registry
        """

        conn = self.dbi.connection()
        tran = conn.begin()
        try:
            
            body = request.body.read()
            service = cjson.decode(body)
            addthis = {}
            addthis['service_id'] = self.sm.increment(conn, "SEQ_RS", tran)
            addthis['name'] = service.get('NAME', '')
            if addthis['name'] == '':
                msg = (("%s DBSServicesRegistry/addServices." +
                        " Service Must be Named\n") %
                       DBSEXCEPTIONS['dbsException-3'])
                raise Exception("dbsException-3", msg)
            addthis['type'] = service.get('TYPE', 'GENERIC')
            addthis['location'] = service.get('LOCATION', 'HYPERSPACE')
            addthis['status'] = service.get('STATUS', 'UNKNOWN')
            addthis['admin'] = service.get('ADMIN', 'UNADMINISTRATED')
            addthis['uri'] = service.get('URI','')
            if addthis['uri'] == '':
                msg = (("%s DBSServicesRegistry/addServices." + 
                        " Service URI must be provided.\n") %
                       DBSEXCEPTIONS['dbsException-3'])
                self.logger.exception(msg)
                raise Exception("dbsException-3", msg)
            addthis['db'] = service.get('DB', 'NO_DATABASE')
            addthis['version'] = service.get('VERSION','UNKNOWN' )
            addthis['last_contact'] = dbsUtils().getTime()
            addthis['comments'] = service.get('COMMENTS', 'NO COMMENTS')
            addthis['alias'] = service.get('ALIAS', 'No Alias')
            self.servicesadd.execute(conn, addthis, tran)
            tran.commit()
        except exceptions.IntegrityError as ex:
            if (str(ex).find("unique constraint") != -1 or
                str(ex).lower().find("duplicate") != -1):
                #Update the service instead
                try:
                    self.servicesupdate.execute(conn, addthis, tran)
                    tran.commit()
                except Exception as ex:
                    msg = (("%s DBSServiceRegistry/addServices." +
                            " %s\n. Exception trace: \n %s") %
                           (DBSEXCEPTIONS['dbsException-3'], ex,
                            traceback.format_exc()))
                    self.logger.exception(msg)
                    raise Exception("dbsException-3", msg)
Beispiel #45
0
    def addService(self):
        """
        Add a service to service registry
        """

        conn = self.dbi.connection()
        tran = conn.begin()
        try:

            body = request.body.read()
            service = cjson.decode(body)
            addthis = {}
            addthis['service_id'] = self.sm.increment(conn, "SEQ_RS", tran)
            addthis['name'] = service.get('NAME', '')
            if addthis['name'] == '':
                msg = (("%s DBSServicesRegistry/addServices." +
                        " Service Must be Named\n") %
                       DBSEXCEPTIONS['dbsException-3'])
                raise Exception("dbsException-3", msg)
            addthis['type'] = service.get('TYPE', 'GENERIC')
            addthis['location'] = service.get('LOCATION', 'HYPERSPACE')
            addthis['status'] = service.get('STATUS', 'UNKNOWN')
            addthis['admin'] = service.get('ADMIN', 'UNADMINISTRATED')
            addthis['uri'] = service.get('URI', '')
            if addthis['uri'] == '':
                msg = (("%s DBSServicesRegistry/addServices." +
                        " Service URI must be provided.\n") %
                       DBSEXCEPTIONS['dbsException-3'])
                self.logger.exception(msg)
                raise Exception("dbsException-3", msg)
            addthis['db'] = service.get('DB', 'NO_DATABASE')
            addthis['version'] = service.get('VERSION', 'UNKNOWN')
            addthis['last_contact'] = dbsUtils().getTime()
            addthis['comments'] = service.get('COMMENTS', 'NO COMMENTS')
            addthis['alias'] = service.get('ALIAS', 'No Alias')
            self.servicesadd.execute(conn, addthis, tran)
            tran.commit()
        except exceptions.IntegrityError as ex:
            if (str(ex).find("unique constraint") != -1
                    or str(ex).lower().find("duplicate") != -1):
                #Update the service instead
                try:
                    self.servicesupdate.execute(conn, addthis, tran)
                    tran.commit()
                except Exception as ex:
                    msg = (("%s DBSServiceRegistry/addServices." +
                            " %s\n. Exception trace: \n %s") %
                           (DBSEXCEPTIONS['dbsException-3'], ex,
                            traceback.format_exc()))
                    self.logger.exception(msg)
                    raise Exception("dbsException-3", msg)
Beispiel #46
0
    def execute(self, conn, migration_url="", migration_input="", create_by="", migration_request_id="", oldest=False, transaction=False):
        """
        Lists all requests if pattern is not provided.
        """
        if not conn:
            dbsExceptionHandler("dbsException-db-conn-failed", "Oracle/MigrationRequests/List. Expects db connection from upper layer.")
        sql = self.sql
        binds = {}
        if migration_request_id:
            sql += " WHERE MR.MIGRATION_REQUEST_ID=:migration_request_id"
            binds['migration_request_id'] = migration_request_id
        elif oldest:
            #FIXME: Need to write the sql.YG
            #current_date = dbsUtils().getTime()
            #the required waiting time is:
            #retry_count=0 : 1 minute
            #retry_count=1 : 2 minutes
            #retry_count=2 : 4 minutes

            sql += """
                       WHERE MR.MIGRATION_STATUS=0
                       or (MR.migration_status=3 and MR.retry_count=0 and MR.last_modification_date <= :current_date-60)
                       or (MR.migration_status=3 and MR.retry_count=1 and MR.last_modification_date <= :current_date-120)
                       or (MR.migration_status=3 and MR.retry_count=2 and MR.last_modification_date <= :current_date-240)
                       ORDER BY MR.creation_date
                   """
            binds['current_date'] = dbsUtils().getTime()
        else:
            if migration_url or migration_input or create_by:
                sql += " WHERE "
            if migration_url:
                sql += " MR.MIGRATION_URL=:migration_url"
                binds['migration_url'] = migration_url
            if migration_input:
                if migration_url:
                    sql += " AND "
                op = ("=", "like")["%" in migration_input]
                sql += " MR.MIGRATION_INPUT %s :migration_input" % op
                binds['migration_input'] = migration_input
            if create_by:
                if migration_url or migration_input:
                    sql += " AND "
                sql += " MR.CREATE_BY=:create_by"
                binds['create_by'] = create_by
        cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
        result = []
        for c in cursors:
            result.extend(self.formatCursor(c))
        return result
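The `oldest` branch encodes an exponential back-off for failed migrations (status 3): 60, 120 and 240 seconds for retry_count 0, 1 and 2. A one-line restatement of that arithmetic:

# Waiting time before a failed migration (status 3) is retried, per the SQL above.
def retry_wait_seconds(retry_count):
    return 60 * (2 ** retry_count)

assert [retry_wait_seconds(n) for n in (0, 1, 2)] == [60, 120, 240]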
Beispiel #47
0
    def execute(self,
                conn,
                block_name,
                open_for_writing,
                ltime,
                transaction=False):
        """
        for a given block
        """
        if not conn:
            dbsExceptionHandler(
                "dbsException-failed-connect2host",
                "Oracle/Block/UpdateStatus. Expects db connection from upper layer.",
                self.logger.exception)
        binds = {
            "block_name": block_name,
            "open_for_writing": open_for_writing,
            'ltime': ltime,
            'myuser': dbsUtils().getCreateBy()
        }
        self.dbi.processData(self.sql, binds, conn, transaction)
Beispiel #48
0
    def updateStatus(self, block_name="", open_for_writing=0):
        """
        Used to toggle the status of a block: open_for_writing=1, open for writing; open_for_writing=0, closed.
        """
        if open_for_writing not in [1, 0, '1', '0']:
            msg = "DBSBlock/updateStatus. open_for_writing can only be 0 or 1 : passed %s."\
                   % open_for_writing
            dbsExceptionHandler('dbsException-invalid-input', msg)
        conn = self.dbi.connection()
        trans = conn.begin()
        try:
            open_for_writing = int(open_for_writing)
            self.updatestatus.execute(conn, block_name, open_for_writing,
                                      dbsUtils().getTime(), trans)
            trans.commit()
            trans = None
        except Exception as ex:
            if trans:
                trans.rollback()
            if conn: conn.close()
            raise ex
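A usage sketch, assuming `dbs_block` is an instance of the class above: close a block, then reopen it. Both ints and the strings '0'/'1' are accepted; anything else triggers dbsException-invalid-input.

dbs_block.updateStatus(block_name="/MinimumBias/Run2012A-v1/RECO#9f3a2c",
                       open_for_writing=0)    # close
dbs_block.updateStatus(block_name="/MinimumBias/Run2012A-v1/RECO#9f3a2c",
                       open_for_writing='1')  # reopen; '1' is coerced with int()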
Beispiel #49
0
    def insertFile(self, businput, qInserts=False):
        """
        This method supports bulk insert of files
        performing other operations such as setting Block and Dataset parentages,
        setting mapping between OutputConfigModules and File(s) etc.

        :param qInserts: True means that inserts will be queued instead of done immediately. INSERT QUEUE Manager will perform the inserts, within a few minutes.
        :type qInserts: bool
        :param logical_file_name (required) : string
        :param is_file_valid: (optional, default = 1): 1/0
        :param block, required: /a/b/c#d
        :param dataset, required: /a/b/c
        :param file_type (optional, default = EDM): one of the predefined types,
        :param check_sum (optional): string
        :param event_count (optional, default = -1): int
        :param file_size (optional, default = -1.): float
        :param adler32 (optional): string
        :param md5 (optional): string
        :param auto_cross_section (optional, default = -1.): float
        :param file_lumi_list (optional, default = []): [{'run_num': 123, 'lumi_section_num': 12},{}....]
        :param file_parent_list(optional, default = []) :[{'file_parent_lfn': 'mylfn'},{}....]
        :param file_assoc_list(optional, default = []) :[{'file_parent_lfn': 'mylfn'},{}....]
        :param file_output_config_list(optional, default = []) :
        [{'app_name':..., 'release_version':..., 'pset_hash':...., output_module_label':...},{}.....]
        """

        # We do not want to go beyond 10 files at a time
        # If a user wants to insert more than 10 files in one shot, we run the risk of locking the database
        # tables for a longer time, and in case of error, it will be hard to see where the error occurred
        if len(businput) > 10:
            dbsExceptionHandler('dbsException-input-too-large', "DBS cannot insert \
                    more than 10 files in one bulk call")
            return

        conn = self.dbi.connection()
        tran = conn.begin()
        try:
            #Now we are dealing with independent files that have different dataset/block and so on.
            #See Trac #358.
            #The expected input data format is a list of dictionary to insert independent files into DBS,
            #inputdata={'files':[{}, {}, {}]}
            #YG  09/15/2011

            # AA- 01/06/2010 -- we have to do this file-by-file, there is no real good way to do this complex operation otherwise
            #files2insert = []
            #fidl = []
            fileInserted = False
            dataset = ""
            block_name = ""
            dataset_id = -1
            block_id = -1
            dsconfigs = []
            for f in businput:
                if not ("logical_file_name" in f and "block_name" in f and "dataset" in f ):
                    dbsExceptionHandler('dbsException-invalid-input', "DBSFile/insertFile must have logical_file_name, block_name and dataset as input")
                if f["block_name"].split('#')[0] != f["dataset"]:
                    dbsExceptionHandler('dbsException-invalid-input', "DBSFile/insertFile: dataset and block_name NOT match")
                # first check if the dataset exists
                # and block exists that files are suppose to be going to and is OPEN for writing
                if dataset != f["dataset"]:
                    dataset_id = self.datasetid.execute(conn, dataset=f["dataset"])
                    dataset = f["dataset"]
                    if dataset_id == -1 :
                        dbsExceptionHandler('dbsException-missing-data', "Required Dataset Not Found.", None,
                        "Required Dataset %s does not exist"%f["dataset"] )
                    # get the list of configs in for this dataset
                    dsconfigs = [x['output_mod_config_id'] for x in self.dsconfigids.execute(conn, dataset=f["dataset"])]
                fileconfigs = [] # this will hold file configs that we will list in the insert file logic below
                if block_name != f["block_name"]:
                    block_info = self.blocklist.execute(conn, block_name=f["block_name"])
		    for b in block_info:
			if not b  : 
			    dbsExceptionHandler( "dbsException-missing-data", "Required block not found", None,
                                                          "Cannot found required block %s in DB" %f["block_name"])
			else:	
			    if  b["open_for_writing"] != 1 : 
				dbsExceptionHandler("dbsException-conflict-data", "Block closed", None,
				    "Block %s is not open for writting" %f["block_name"])
			    if "block_id" in b:
				block_id = b["block_id"]
			    else:
				dbsExceptionHandler("dbsException-missing-data", "Block not found", None,
                                          "Cannot found required block %s in DB" %f["block_name"])
                else: dbsExceptionHandler('dbsException-missing-data', "Required block name Not Found in input.",
                                            None, "Required block Not Found in input.")
                #make the default file_type=EDM
                file_type_id = self.ftypeid.execute( conn, f.get("file_type", "EDM"))
                if file_type_id == -1:
                    dbsExceptionHandler('dbsException-missing-data', "File type not found.", None,
                                        "Required file type %s not found in DBS"%f.get("file_type", "EDM") )

                iFile = 0
                fileIncrement = 40
                fID = self.sm.increment(conn, "SEQ_FL", incCount=fileIncrement)
                #looping over the files, every time create a new object 'filein' as you never know
                #what's in the original object and we do not want to know
                #for f in businput:
                file_clob = {}
                fparents2insert = []
                flumis2insert = []
                fconfigs2insert = []
                # create the file object from the original
                # taking care of defaults, and required
                filein = {
                    "logical_file_name" : f["logical_file_name"],
                    "is_file_valid" : f.get("is_file_valid", 1),
                    "check_sum" : f.get("check_sum", None),
                    "event_count" : f.get("event_count", -1),
                    "file_size" : f.get("file_size", -1),
                    "adler32" : f.get("adler32", None),
                    "md5" : f.get("md5", None),
                    "auto_cross_section" : f.get("auto_cross_section", -1),
                    #"creation_date" : f.get("creation_date", None),  See Ticket #965 YG.
                    #"create_by": f.get("create_by", None),
                    "last_modification_date": f.get("last_modification_date", None),
                    #"last_modified_by" : f.get("last_modified_by", None)
                    "last_modified_by" : dbsUtils().getCreateBy()
                }
                if filein["md5"] is None  and filein["check_sum"] is None and filein["adler32"] is None:
                    dbsExceptionHandler('dbsException-invalid-input', "Missing check_sum or adler32, or md5")
                if iFile == fileIncrement:
                    fID = self.sm.increment(conn, "SEQ_FL", incCount=fileIncrement)
                    iFile = 0
                filein["file_id"] = fID + iFile
                iFile += 1
                filein["dataset_id"] = dataset_id
                filein["block_id"] = block_id
                filein["file_type_id"] = file_type_id
                #FIXME: Add this later if f.get("branch_hash", "") not in ("", None):
                #filein["branch_hash"]=self.fbranchid.execute( f.get("branch_hash"), conn, transaction=tran)
                # insert file  -- as decided, one file at a time
                # filein will be what goes into database
                try:
                    if not qInserts:
                        self.filein.execute(conn, filein, transaction=tran)
                        fileInserted = True
                    else:
                        file_clob['file'] = filein
                except SQLAlchemyIntegrityError as ex:
                    if str(ex).find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
                        # Lets move on to NEXT file, we do not want to continue processing this file

                        #Nothing about this file is updated when it is already in DB. No file parentage, block parentage, dataset parentage and so on.
                        #Is this right?  YG  Oct. 24
                        self.logger.warning("DBSFile/insertFile. File already exists in DBS, not changing it: %s"
                                            %filein["logical_file_name"] )
                        continue
                    else:
                        raise

                #process file parents, file lumi, file outputmodconfigs, ...
                #file lumi sections
                if "file_lumi_list" in f:
                    fllist = f["file_lumi_list"]
                    if len(fllist) > 0:
                        for fl in fllist:
                            fldao = {
                                "run_num" : fl["run_num"],
                                "lumi_section_num" : fl["lumi_section_num"]
                            }
                            fldao["file_id"] = filein["file_id"]
                            flumis2insert.append(fldao)

                if "file_parent_list" in f:
                    #file parents
                    fplist = f["file_parent_list"]

                    for fp in fplist:
                        fpdao = {}
                        fpdao["this_file_id"] = filein["file_id"]
                        fpdao["parent_logical_file_name"] = fp["file_parent_lfn"]
                        fparents2insert.append(fpdao)
                if "file_output_config_list" in f:
                    #file output config modules
                    foutconfigs = f["file_output_config_list"]
                    if(len(foutconfigs) > 0):
                        for fc in foutconfigs:
                            fcdao = {}
                            fcdao["file_id"] = filein["file_id"]
                            fcdao["output_mod_config_id"] = self.outconfigid.execute(conn, fc["app_name"],
                                    fc["release_version"], fc["pset_hash"], fc["output_module_label"],
                                    fc["global_tag"])
                            if fcdao["output_mod_config_id"] == -1 :
                                dbsExceptionHandler('dbsException-missing-data', 'Config Not found.', None, "DBSFile/insertFile.\
                                        Output module config (%s, %s, %s, %s) \
                                        not found" % (fc["app_name"],
                                        fc["release_version"], fc["pset_hash"], fc["output_module_label"]) )
                            fileconfigs.append(fcdao["output_mod_config_id"])
                            fconfigs2insert.append(fcdao)
                #FIXME: file associations?-- in a later release
                #
                # insert file - lumi
                if flumis2insert:
                    file_clob['file_lumi_list'] = flumis2insert
                    if not qInserts:
                        self.flumiin.execute(conn, flumis2insert, transaction=tran)
                # insert file parent mapping
                if fparents2insert:
                    file_clob['file_parent_list'] = fparents2insert
                    if not qInserts:
                        self.fparentin.execute(conn, fparents2insert, transaction=tran)
                # First check to see if these output configs are mapped to THIS dataset as well, if not raise an exception
                if not set(fileconfigs).issubset(set(dsconfigs)):
                    dbsExceptionHandler('dbsException-conflict-data', 'Mismatched configuration. ', None, "DBSFile/insertFile. Output configs mismatch: \
                            output configs known to dataset %s \
                            are different from what are being mapped to file %s " \
                            %(f["dataset"], filein["logical_file_name"]) )
                # insert output module config mapping
                if fconfigs2insert:
                    file_clob['file_output_config_list'] = fconfigs2insert
                    if not qInserts:
                        self.fconfigin.execute(conn, fconfigs2insert, transaction=tran)
                if qInserts:
                    try:
                        self.logger.warning(file_clob)
                        self.filebufin.execute(conn, filein['logical_file_name'], block_id, file_clob, transaction=tran)
                    except SQLAlchemyIntegrityError as ex:
                        if str(ex).find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
                            pass
                        else:
                            raise

                #insert block parentages and dataset parentages based on file parentages
                # Do this one by one, as it is sure to have duplicate in dest table
                if fileInserted and fparents2insert:
                    for fp in fparents2insert:
                        try:
                            bkParentage2insert={'this_block_id' : filein["block_id"], 'parent_logical_file_name': fp['parent_logical_file_name']}
                            self.blkparentin.execute(conn, bkParentage2insert, transaction=tran)
                            dsParentage2insert={'this_dataset_id': filein["dataset_id"], 'parent_logical_file_name' : fp['parent_logical_file_name']}
                            self.dsparentin.execute(conn, dsParentage2insert, transaction=tran)
                        except SQLAlchemyIntegrityError as ex:
                            #ORA-00001
                            if (str(ex).find("ORA-00001") != -1 and str(ex).find("PK_DP") != -1) or str(ex).find("PK_BP") != -1 or str(ex).lower().find("duplicate") != -1:
                                pass
                            elif str(ex).find("ORA-01400") != -1:
                                raise
                            else:
                                raise

                # Update block parameters, file_count, block_size
                if not qInserts:
                    blkParams = self.blkstats.execute(conn, block_id,
                                                      transaction=tran)
                    blkParams['block_size'] = long(blkParams['block_size'])
                    self.blkstatsin.execute(conn, blkParams, transaction=tran)

            # All good ?
            tran.commit()
            tran = None

        except Exception as ex:
            if tran:
                tran.rollback()
                tran = None
            raise

        finally:
            if tran:
                tran.rollback()
            if conn:
                conn.close()
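The file loop above draws 40 ids at a time from SEQ_FL instead of paying one sequence round trip per file. A self-contained sketch of that batching pattern, with the sequence manager stubbed out:

class FakeSequenceManager(object):
    """Stand-in for self.sm: hands out blocks of ids like SEQ_FL would."""
    def __init__(self):
        self.next_id = 1
    def increment(self, conn, seq, incCount=1):
        start = self.next_id
        self.next_id += incCount
        return start

sm, fileIncrement, iFile = FakeSequenceManager(), 40, 0
fID = sm.increment(None, "SEQ_FL", incCount=fileIncrement)
ids = []
for _ in range(100):                 # 100 files cost only 3 sequence calls
    if iFile == fileIncrement:       # block exhausted: fetch the next 40 ids
        fID = sm.increment(None, "SEQ_FL", incCount=fileIncrement)
        iFile = 0
    ids.append(fID + iFile)
    iFile += 1
assert ids == list(range(1, 101))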
Beispiel #50
0
    def insertDataset(self, businput):
        """
        input dictionary must have the following keys:
        dataset, primary_ds_name(name), processed_ds(name), data_tier(name),
        acquisition_era(name), processing_version
        It may have following keys:
        physics_group(name), xtcrosssection, creation_date, create_by, 
        last_modification_date, last_modified_by
        """ 
        if not ("primary_ds_name" in businput and "dataset" in businput
                and "dataset_access_type" in businput and "processed_ds_name" in businput ):
            dbsExceptionHandler('dbsException-invalid-input', "business/DBSDataset/insertDataset must have dataset,\
                dataset_access_type, primary_ds_name, processed_ds_name as input")

        if "data_tier_name" not in businput:
            dbsExceptionHandler('dbsException-invalid-input', "insertDataset must have data_tier_name as input.")

        conn = self.dbi.connection()
        tran = conn.begin()
        try:

            dsdaoinput = {}
            dsdaoinput["primary_ds_name"] = businput["primary_ds_name"]
            dsdaoinput["data_tier_name"] =  businput["data_tier_name"].upper()
            dsdaoinput["dataset_access_type"] = businput["dataset_access_type"].upper()
            #not required to pre-exist in the db. will be inserted with the dataset if not in yet
            #processed_ds_name=acquisition_era_name[-filter_name][-processing_str]-vprocessing_version   Changed as of 4/30/2012 YG.
            #although acquisition era and processing version are not required for a dataset in the schema (the schema is built this way because
            #we need to accommodate the DBS2 data), we impose the requirement on the API. So both acquisition and processing eras are required
            #YG 12/07/2011  TK-362
            if "acquisition_era_name" in businput and "processing_version" in businput:
                erals=businput["processed_ds_name"].rsplit('-')
                if erals[0]==businput["acquisition_era_name"] and erals[len(erals)-1]=="%s%s"%("v", businput["processing_version"]):
                    dsdaoinput["processed_ds_name"] = businput["processed_ds_name"]
                else:
                    dbsExceptionHandler('dbsException-invalid-input', "insertDataset:\
                    processed_ds_name=acquisition_era_name[-filter_name][-processing_str]-vprocessing_version must be satisified.")
            else:
                dbsExceptionHandler("dbsException-missing-data",  "insertDataset: Required acquisition_era_name or processing_version is not found in the input")
            
            if "physics_group_name" in businput:
                dsdaoinput["physics_group_id"] = self.phygrpid.execute(conn, businput["physics_group_name"])
                if dsdaoinput["physics_group_id"]  == -1:
                    dbsExceptionHandler("dbsException-missing-data",  "insertDataset. physics_group_name not found in DB")
            else:
                dsdaoinput["physics_group_id"] = None

            dsdaoinput["dataset_id"] = self.sm.increment(conn, "SEQ_DS")
            # we are better off separating out what we need for the dataset DAO
            dsdaoinput.update({ 
                               "dataset" : "/%s/%s/%s" %
                               (businput["primary_ds_name"],
                                businput["processed_ds_name"],
                                businput["data_tier_name"].upper()),
                               "prep_id" : businput.get("prep_id", None),
                               "xtcrosssection" : businput.get("xtcrosssection", None),
                               "creation_date" : businput.get("creation_date", dbsUtils().getTime() ),
                               "create_by" : businput.get("create_by", dbsUtils().getCreateBy()) ,
                               "last_modification_date" : businput.get("last_modification_date", dbsUtils().getTime()),
                               #"last_modified_by" : businput.get("last_modified_by", dbsUtils().getModifiedBy())
                               "last_modified_by" : dbsUtils().getModifiedBy()
                               })
            """
            repeated again, why?  comment out by YG 3/14/2012
            #physics group
            if "physics_group_name" in businput:
                dsdaoinput["physics_group_id"] = self.phygrpid.execute(conn, businput["physics_group_name"])
                if dsdaoinput["physics_group_id"]  == -1:
                    dbsExceptionHandler("dbsException-missing-data",  "insertDataset. Physics Group : %s Not found"
                                                                                    % businput["physics_group_name"])
            else: dsdaoinput["physics_group_id"] = None
            """
            # See if Processing Era exists
            if "processing_version" in businput and businput["processing_version"] != 0:
                dsdaoinput["processing_era_id"] = self.proceraid.execute(conn, businput["processing_version"])
                if dsdaoinput["processing_era_id"] == -1 :
                    dbsExceptionHandler("dbsException-missing-data", "DBSDataset/insertDataset: processing_version not found in DB") 
            else:
                dbsExceptionHandler("dbsException-invalid-input", "DBSDataset/insertDataset: processing_version is required")

            # See if Acquisition Era exists
            if "acquisition_era_name" in businput:
                dsdaoinput["acquisition_era_id"] = self.acqeraid.execute(conn, businput["acquisition_era_name"])
                if dsdaoinput["acquisition_era_id"] == -1:
                    dbsExceptionHandler("dbsException-missing-data", "DBSDataset/insertDataset: acquisition_era_name not found in DB")
            else:
                dbsExceptionHandler("dbsException-invalid-input", "DBSDataset/insertDataset:  acquisition_era_name is required")
            try:
                # insert the dataset
                self.datasetin.execute(conn, dsdaoinput, tran)
            except SQLAlchemyIntegrityError as ex:
                if (str(ex).lower().find("unique constraint") != -1 or
                    str(ex).lower().find("duplicate") != -1):
                    # dataset already exists, lets fetch the ID
                    self.logger.warning(
                            "Unique constraint violation being ignored...")
                    self.logger.warning("%s" % ex)
                    ds = "/%s/%s/%s" % (businput["primary_ds_name"], businput["processed_ds_name"], businput["data_tier_name"].upper())
                    dsdaoinput["dataset_id"] = self.datasetid.execute(conn, ds )
                    if dsdaoinput["dataset_id"] == -1 :
                        dbsExceptionHandler("dbsException-missing-data", "DBSDataset/insertDataset. Strange error, the dataset %s does not exist ?" 
                                                    % ds )
                if (str(ex).find("ORA-01400") ) != -1 :
                    dbsExceptionHandler("dbsException-missing-data", "insertDataset must have: dataset,\
                                          primary_ds_name, processed_ds_name, data_tier_name ")
            except Exception as e:
                raise       

            #FIXME : What about the READ-only status of the dataset
            #There is no READ-only status for a dataset.

            # Create dataset_output_mod_configs mapping
            if "output_configs" in businput:
                for anOutConfig in businput["output_configs"]:
                    dsoutconfdaoin = {}
                    dsoutconfdaoin["dataset_id"] = dsdaoinput["dataset_id"]
                    dsoutconfdaoin["output_mod_config_id"] = self.outconfigid.execute(conn, anOutConfig["app_name"],
                                                                                anOutConfig["release_version"],
                                                                                anOutConfig["pset_hash"],
                                                                                anOutConfig["output_module_label"],
                                                                                anOutConfig["global_tag"]) 
                    if dsoutconfdaoin["output_mod_config_id"] == -1 : 

                        dbsExceptionHandler("dbsException-missing-data", "DBSDataset/insertDataset: Output config (%s, %s, %s, %s, %s) not found"
                                                                                % (anOutConfig["app_name"],
                                                                                   anOutConfig["release_version"],
                                                                                   anOutConfig["pset_hash"],
                                                                                   anOutConfig["output_module_label"],
                                                                                   anOutConfig["global_tag"]))
                    try:
                        self.datasetoutmodconfigin.execute(conn, dsoutconfdaoin, tran)
                    except Exception as ex:
                        if str(ex).lower().find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
                            pass
                        else:
                            raise
            # Dataset parentage will NOT be added by this API it will be set by insertFiles()--deduced by insertFiles
            # Dataset  runs will NOT be added by this API they will be set by insertFiles()--deduced by insertFiles OR insertRun API call
            tran.commit()
            tran = None
        except Exception:
            if tran:
                tran.rollback()
                tran = None
            raise
        finally:
            if tran:
                tran.rollback()
            if conn:
                conn.close()
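A compact restatement of the processed_ds_name rule the code above enforces, as a hypothetical helper with illustrative names:

def processed_name_ok(processed_ds_name, acquisition_era_name, processing_version):
    # acquisition_era_name[-filter_name][-processing_str]-vPROCESSING_VERSION
    parts = processed_ds_name.rsplit('-')
    return parts[0] == acquisition_era_name and parts[-1] == "v%s" % processing_version

assert processed_name_ok("Run2012A-PromptReco-v1", "Run2012A", 1)
assert not processed_name_ok("Run2012A-PromptReco-v2", "Run2012A", 1)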
Beispiel #51
0
    def insertDataset(self, businput):
        """
        input dictionary must have the following keys:
        dataset, primary_ds_name(name), processed_ds(name), data_tier(name),
        acquisition_era(name), processing_version
        It may have following keys:
        physics_group(name), xtcrosssection, creation_date, create_by, 
        last_modification_date, last_modified_by
        """
        if not ("primary_ds_name" in businput
                and "dataset" in businput
                and "dataset_access_type" in businput
                and "processed_ds_name" in businput):
            dbsExceptionHandler(
                'dbsException-invalid-input',
                "business/DBSDataset/insertDataset must have dataset,\
                dataset_access_type, primary_ds_name, processed_ds_name as input"
            )

        if "data_tier_name" not in businput:
            dbsExceptionHandler(
                'dbsException-invalid-input',
                "insertDataset must have data_tier_name as input.")

        conn = self.dbi.connection()
        tran = conn.begin()
        try:

            dsdaoinput = {}
            dsdaoinput["primary_ds_name"] = businput["primary_ds_name"]
            dsdaoinput["data_tier_name"] = businput["data_tier_name"].upper()
            dsdaoinput["dataset_access_type"] = businput[
                "dataset_access_type"].upper()
            #not required to pre-exist in the db. will be inserted with the dataset if not in yet
            #processed_ds_name=acquisition_era_name[-filter_name][-processing_str]-vprocessing_version   Changed as of 4/30/2012 YG.
            #although acquisition era and processing version are not required for a dataset in the schema (the schema is built this way because
            #we need to accommodate the DBS2 data), we impose the requirement on the API. So both acquisition and processing eras are required
            #YG 12/07/2011  TK-362
            if "acquisition_era_name" in businput and "processing_version" in businput:
                erals = businput["processed_ds_name"].rsplit('-')
                if erals[0] == businput["acquisition_era_name"] and erals[
                        -1] == "%s%s" % ("v", businput["processing_version"]):
                    dsdaoinput["processed_ds_name"] = businput[
                        "processed_ds_name"]
                else:
                    dbsExceptionHandler(
                        'dbsException-invalid-input', "insertDataset:\
                    processed_ds_name=acquisition_era_name[-filter_name][-processing_str]-vprocessing_version must be satisfied."
                    )
            else:
                dbsExceptionHandler(
                    "dbsException-missing-data",
                    "insertDataset: Required acquisition_era_name or processing_version is not found in the input"
                )

            if "physics_group_name" in businput:
                dsdaoinput["physics_group_id"] = self.phygrpid.execute(
                    conn, businput["physics_group_name"])
                if dsdaoinput["physics_group_id"] == -1:
                    dbsExceptionHandler(
                        "dbsException-missing-data",
                        "insertDataset. physics_group_name not found in DB")
            else:
                dsdaoinput["physics_group_id"] = None

            dsdaoinput["dataset_id"] = self.sm.increment(conn, "SEQ_DS")
            # we are better off separating out what we need for the dataset DAO
            dsdaoinput.update({
                "dataset":
                "/%s/%s/%s" %
                (businput["primary_ds_name"], businput["processed_ds_name"],
                 businput["data_tier_name"].upper()),
                "prep_id":
                businput.get("prep_id", None),
                "xtcrosssection":
                businput.get("xtcrosssection", None),
                "creation_date":
                businput.get("creation_date",
                             dbsUtils().getTime()),
                "create_by":
                businput.get("create_by",
                             dbsUtils().getCreateBy()),
                "last_modification_date":
                businput.get("last_modification_date",
                             dbsUtils().getTime()),
                #"last_modified_by" : businput.get("last_modified_by", dbsUtils().getModifiedBy())
                "last_modified_by":
                dbsUtils().getModifiedBy()
            })
            """
            repeated again, why?  comment out by YG 3/14/2012
            #physics group
            if "physics_group_name" in businput:
                dsdaoinput["physics_group_id"] = self.phygrpid.execute(conn, businput["physics_group_name"])
                if dsdaoinput["physics_group_id"]  == -1:
                    dbsExceptionHandler("dbsException-missing-data",  "insertDataset. Physics Group : %s Not found"
                                                                                    % businput["physics_group_name"])
            else: dsdaoinput["physics_group_id"] = None
            """
            # See if Processing Era exists
            if businput.has_key("processing_version"
                                ) and businput["processing_version"] != 0:
                dsdaoinput["processing_era_id"] = self.proceraid.execute(
                    conn, businput["processing_version"])
                if dsdaoinput["processing_era_id"] == -1:
                    dbsExceptionHandler(
                        "dbsException-missing-data",
                        "DBSDataset/insertDataset: processing_version not found in DB"
                    )
            else:
                dbsExceptionHandler(
                    "dbsException-invalid-input",
                    "DBSDataset/insertDataset: processing_version is required")

            # See if Acquisition Era exists
            if businput.has_key("acquisition_era_name"):
                dsdaoinput["acquisition_era_id"] = self.acqeraid.execute(
                    conn, businput["acquisition_era_name"])
                if dsdaoinput["acquisition_era_id"] == -1:
                    dbsExceptionHandler(
                        "dbsException-missing-data",
                        "DBSDataset/insertDataset: acquisition_era_name not found in DB"
                    )
            else:
                dbsExceptionHandler(
                    "dbsException-invalid-input",
                    "DBSDataset/insertDataset:  acquisition_era_name is required"
                )
            try:
                # insert the dataset
                self.datasetin.execute(conn, dsdaoinput, tran)
            except SQLAlchemyIntegrityError as ex:
                if (str(ex).lower().find("unique constraint") != -1
                        or str(ex).lower().find("duplicate") != -1):
                    # dataset already exists, lets fetch the ID
                    self.logger.warning(
                        "Unique constraint violation being ignored...")
                    self.logger.warning("%s" % ex)
                    ds = "/%s/%s/%s" % (businput["primary_ds_name"],
                                        businput["processed_ds_name"],
                                        businput["data_tier_name"].upper())
                    dsdaoinput["dataset_id"] = self.datasetid.execute(conn, ds)
                    if dsdaoinput["dataset_id"] == -1:
                        dbsExceptionHandler(
                            "dbsException-missing-data",
                            "DBSDataset/insertDataset. Strange error, the dataset %s does not exist ?"
                            % ds)
                if (str(ex).find("ORA-01400")) != -1:
                    dbsExceptionHandler(
                        "dbsException-missing-data",
                        "insertDataset must have: dataset,\
                                          primary_ds_name, processed_ds_name, data_tier_name "
                    )
            except Exception as e:
                raise
Beispiel #52
0
    def insertFile(self, qInserts=False):
        """
        API to insert a list of files into DBS. Up to 10 files can be inserted in one request.

        :param qInserts: True means that inserts will be queued instead of done immediately. INSERT QUEUE Manager will perform the inserts, within a few minutes.
        :type qInserts: bool
        :param filesList: List of dictionaries containing following information
        :type filesList: list of dicts
        :key logical_file_name: File to be inserted (str) (Required)
        :key is_file_valid: (optional, default = 1): (bool)
        :key block: required: /a/b/c#d (str)
        :key dataset: required: /a/b/c (str)
        :key file_type: (optional, default = EDM) one of the predefined types, (str)
        :key check_sum: (optional, default = '-1') (str)
        :key event_count: (optional, default = -1) (int)
        :key file_size: (optional, default = -1.) (float)
        :key adler32: (optional, default = '') (str)
        :key md5: (optional, default = '') (str)
        :key auto_cross_section: (optional, default = -1.) (float)
        :key file_lumi_list: (optional, default = []) [{'run_num': 123, 'lumi_section_num': 12},{}....]
        :key file_parent_list: (optional, default = []) [{'file_parent_lfn': 'mylfn'},{}....]
        :key file_assoc_list: (optional, default = []) [{'file_parent_lfn': 'mylfn'},{}....]
        :key file_output_config_list: (optional, default = []) [{'app_name':..., 'release_version':..., 'pset_hash':...., output_module_label':...},{}.....]

        """
        if qInserts in (False, 'False'): qInserts = False
        try:
            body = request.body.read()
            indata = cjson.decode(body)["files"]
            if not isinstance(indata, (list, dict)):
                dbsExceptionHandler("dbsException-invalid-input", "Invalid Input DataType", self.logger.exception, \
                                      "insertFile expects input as list or dirc")
            businput = []
            if isinstance(indata, dict):
                indata = [indata]
            indata = validateJSONInputNoCopy("files", indata)
            for f in indata:
                f.update({
                    #"dataset":f["dataset"],
                    "creation_date":
                    f.get("creation_date",
                          dbsUtils().getTime()),
                    "create_by":
                    dbsUtils().getCreateBy(),
                    "last_modification_date":
                    f.get("last_modification_date",
                          dbsUtils().getTime()),
                    "last_modified_by":
                    f.get("last_modified_by",
                          dbsUtils().getCreateBy()),
                    "file_lumi_list":
                    f.get("file_lumi_list", []),
                    "file_parent_list":
                    f.get("file_parent_list", []),
                    "file_assoc_list":
                    f.get("assoc_list", []),
                    "file_output_config_list":
                    f.get("file_output_config_list", [])
                })
                businput.append(f)
            self.dbsFile.insertFile(businput, qInserts)
        except cjson.DecodeError as dc:
            dbsExceptionHandler("dbsException-invalid-input2",
                                "Wrong format/data from insert File input",
                                self.logger.exception, str(dc))
        except dbsException as de:
            dbsExceptionHandler(de.eCode, de.message, self.logger.exception,
                                de.message)
        except HTTPError as he:
            raise he
        except Exception as ex:
            sError = "DBSWriterModel/insertFile. %s\n. Exception trace: \n %s" \
                    % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error',
                                dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)
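A request-body sketch for this API: a {"files": [...]} wrapper with at most 10 entries (the business layer in Beispiel #49 rejects larger bulks). The docstring above names the key "block", while the business layer checks "block_name"; the sketch follows the latter, and all values are invented.

import json

body = json.dumps({
    "files": [{
        "logical_file_name": "/store/data/Run2012A/MinimumBias/RECO/v1/0001.root",
        "block_name": "/MinimumBias/Run2012A-v1/RECO#9f3a2c",
        "dataset": "/MinimumBias/Run2012A-v1/RECO",
        "file_size": 1048576,
        "event_count": 500,
        "adler32": "c80f23ab",
        "file_lumi_list": [{"run_num": 190000, "lumi_section_num": 12}],
    }]
})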