def listReleaseVersions(self, release_version="", dataset='', logical_file_name=''):
    """
    List release versions
    """
    if dataset and ('%' in dataset or '*' in dataset):
        dbsExceptionHandler('dbsException-invalid-input',
                            " DBSReleaseVersion/listReleaseVersions. No wildcards are" +
                            " allowed in dataset.\n.")
    if logical_file_name and ('%' in logical_file_name or '*' in logical_file_name):
        dbsExceptionHandler('dbsException-invalid-input',
                            " DBSReleaseVersion/listReleaseVersions. No wildcards are" +
                            " allowed in logical_file_name.\n.")
    conn = self.dbi.connection()
    try:
        plist = self.releaseVersion.execute(conn, release_version.upper(), dataset, logical_file_name)
        result = [{}]
        if plist:
            t = []
            for i in plist:
                for k, v in i.iteritems():
                    t.append(v)
            result[0]['release_version'] = t
        return result
    finally:
        if conn:
            conn.close()
def insertBulkBlock(self):
    """
    API to insert a bulk block

    :param blockDump: Output of the block dump command
    :type blockDump: dict
    """
    try:
        body = request.body.read()
        indata = cjson.decode(body)
        if indata.get("file_parent_list", []) and indata.get("dataset_parent_list", []):
            dbsExceptionHandler(
                "dbsException-invalid-input2",
                "insertBulkBlock: dataset and file parentages cannot be in the input at the same time",
                self.logger.exception,
                "insertBulkBlock: dataset and file parentages cannot be in the input at the same time.")
        indata = validateJSONInputNoCopy("blockBulk", indata)
        self.dbsBlockInsert.putBlock(indata)
        # send message to NATS if it is configured
        if self.nats:
            try:
                ddata = indata.get('dataset')
                if isinstance(ddata, dict) and 'dataset' in ddata:
                    dataset = ddata.get('dataset')
                    dataset_access_type = ddata.get('dataset_access_type')
                    doc = {'dataset': dataset, 'dataset_type': dataset_access_type}
                    self.nats.publish(doc)
            except Exception as exp:
                err = 'insertDataset NATS error, %s, trace:\n%s' % (str(exp), traceback.format_exc())
                self.logger.warning(err)
    except cjson.DecodeError as dc:
        dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert BulkBlock input",
                            self.logger.exception, str(dc))
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
    except HTTPError as he:
        raise he
    except Exception as ex:
        # illegal variable name/number
        if str(ex).find("ORA-01036") != -1:
            dbsExceptionHandler("dbsException-invalid-input2", "illegal variable name/number from input",
                                self.logger.exception, str(ex))
        else:
            sError = "DBSWriterModel/insertBulkBlock. %s\n. Exception trace: \n %s" \
                     % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)
def execute(self, conn, binds, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/DatasetParent/Insert. Expects db connection from upper layer.")
    result = self.dbi.processData(self.sql, binds, conn, transaction)
def execute(self, conn, blockStats, transaction=False):
    """
    for a given block_id
    """
    if not conn:
        dbsExceptionHandler("dbsException-failed-connect2host",
                            "dbs/dao/Oracle/Block/UpdateStatus expects db connection from upper layer.",
                            self.logger.exception)
    result = self.dbi.processData(self.sql, blockStats, conn, transaction)
def insertBulkBlock(self):
    """
    API to insert a bulk block

    :param blockDump: Output of the block dump command
    :type blockDump: dict
    """
    try:
        body = request.body.read()
        indata = cjson.decode(body)
        if indata.get("file_parent_list", []) and indata.get("dataset_parent_list", []):
            dbsExceptionHandler(
                "dbsException-invalid-input2",
                "insertBulkBlock: dataset and file parentages cannot be in the input at the same time",
                self.logger.exception,
                "insertBulkBlock: dataset and file parentages cannot be in the input at the same time.")
        indata = validateJSONInputNoCopy("blockBulk", indata)
        self.dbsBlockInsert.putBlock(indata)
    except cjson.DecodeError as dc:
        dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert BulkBlock input",
                            self.logger.exception, str(dc))
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
    except HTTPError as he:
        raise he
    except Exception as ex:
        # illegal variable name/number
        if str(ex).find("ORA-01036") != -1:
            dbsExceptionHandler("dbsException-invalid-input2", "illegal variable name/number from input",
                                self.logger.exception, str(ex))
        else:
            sError = "DBSWriterModel/insertBulkBlock. %s\n. Exception trace: \n %s" \
                     % (ex, traceback.format_exc())
            dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                                self.logger.exception, sError)
def insertOutputConfig(self, businput):
    """
    Method to insert the Output Config.
    app_name, release_version, pset_hash, global_tag and output_module_label are required.

    args:
        businput (dict): input dictionary.

    Updated Oct 12, 2011
    """
    if not ("app_name" in businput and "release_version" in businput
            and "pset_hash" in businput and "output_module_label" in businput
            and "global_tag" in businput):
        dbsExceptionHandler('dbsException-invalid-input',
                            "business/DBSOutputConfig/insertOutputConfig requires: "
                            "app_name, release_version, pset_hash, output_module_label and global_tag")
    conn = self.dbi.connection()
    tran = conn.begin()
    try:
        # Proceed with o/p module insertion
        businput['scenario'] = businput.get("scenario", None)
        businput['pset_name'] = businput.get("pset_name", None)
        self.outmodin.execute(conn, businput, tran)
        tran.commit()
        tran = None
    except SQLAlchemyIntegrityError as ex:
        if str(ex).find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
            # if the violation is due to a unique constraint break in OUTPUT_MODULE_CONFIGS
            if str(ex).find("TUC_OMC_1") != -1:
                pass
            # otherwise, try again
            else:
                try:
                    self.outmodin.execute(conn, businput, tran)
                    tran.commit()
                    tran = None
                except SQLAlchemyIntegrityError as ex1:
                    if str(ex1).find("unique constraint") != -1 and str(ex1).find("TUC_OMC_1") != -1:
                        pass
                except Exception as e1:
                    if tran:
                        tran.rollback()
                        tran = None
                    raise
        else:
            raise
    except Exception as e:
        if tran:
            tran.rollback()
        raise
    finally:
        if tran:
            tran.rollback()
        if conn:
            conn.close()
def execute(self, conn, daoinput, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/Site/Insert. Expects db connection from upper layer.")
    self.executeSingle(conn, daoinput, "SITES", transaction)
def updateStatus(self, dataset, is_dataset_valid):
    """
    Used to toggle the status of a dataset is_dataset_valid=0/1 (invalid/valid)
    """
    if dataset == "":
        dbsExceptionHandler("dbsException-invalid-input",
                            "DBSDataset/updateStatus. dataset is required.")
    conn = self.dbi.connection()
    trans = conn.begin()
    try:
        self.updatestatus.execute(conn, dataset, is_dataset_valid, trans)
        trans.commit()
        trans = None
    except Exception as ex:
        if trans:
            trans.rollback()
        raise ex
    finally:
        if trans:
            trans.rollback()
        if conn:
            conn.close()
def execute(self, conn, daoinput, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/Service/Insert. Expects db connection from upper layer.")
    self.dbi.processData(self.sql, daoinput, conn, transaction)
def execute(self, conn, file_id_list, transaction=False):
    """
    file_id_list : file_id_list
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/FileParentBlock/List. Expects db connection from upper layer.")
    sql = self.sql
    binds = {}
    if file_id_list:
        count = 0
        for an_id in file_id_list:
            if count > 0:
                sql += ", "
            sql += ":file_id_%s" % count
            binds.update({"file_id_%s" % count: an_id})
            count += 1
        sql += ")"
    else:
        dbsExceptionHandler('dbsException-invalid-input',
                            "Oracle/FileParentBlock/List. this_file_id not provided")
    result = self.dbi.processData(sql, binds, conn, transaction)
    plist = self.formatDict(result)
    return plist
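The loop above appends one numbered bind variable per file id so the IN clause stays fully parameterized. A minimal standalone sketch of the same idiom in plain Python (the helper name and sample values are illustrative, not part of the DBS code base):

# Sketch: build ":file_id_0, :file_id_1, ..." plus the matching bind dictionary
# for an IN clause, mirroring the hand-written loop in the DAO above.
def build_in_clause(prefix, values):
    placeholders = []
    binds = {}
    for count, value in enumerate(values):
        name = "%s_%s" % (prefix, count)
        placeholders.append(":" + name)
        binds[name] = value
    return ", ".join(placeholders), binds

# Example: returns (":file_id_0, :file_id_1", {"file_id_0": 11, "file_id_1": 42})
print(build_in_clause("file_id", [11, 42]))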
def listDatasetAccessTypes(self, dataset_access_type=""):
    """
    List dataset access types
    """
    if isinstance(dataset_access_type, basestring):
        try:
            dataset_access_type = str(dataset_access_type)
        except:
            dbsExceptionHandler('dbsException-invalid-input',
                                'dataset_access_type given is not valid : %s' % dataset_access_type)
    else:
        dbsExceptionHandler('dbsException-invalid-input',
                            'dataset_access_type given is not valid : %s' % dataset_access_type)
    conn = self.dbi.connection()
    try:
        plist = self.datasetAccessType.execute(conn, dataset_access_type.upper())
        result = [{}]
        if plist:
            t = []
            for i in plist:
                for k, v in i.iteritems():
                    t.append(v)
            result[0]['dataset_access_type'] = t
        return result
    finally:
        if conn:
            conn.close()
def execute(self, conn, logical_file_name='', block_id=0, block_name='', transaction=False):
    """
    return {} if condition is not provided.
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/FileParent/List. Expects db connection from upper layer.")
    sql = ''
    binds = {}
    if logical_file_name:
        if isinstance(logical_file_name, basestring):
            wheresql = "WHERE F.LOGICAL_FILE_NAME = :logical_file_name"
            binds = {"logical_file_name": logical_file_name}
            sql = "{sql} {wheresql}".format(sql=self.sql, wheresql=wheresql)
        elif isinstance(logical_file_name, list):
            wheresql = "WHERE F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR)"
            lfn_generator, binds = create_token_generator(logical_file_name)
            sql = "{lfn_generator} {sql} {wheresql}".format(lfn_generator=lfn_generator,
                                                            sql=self.sql, wheresql=wheresql)
    elif block_id != 0:
        wheresql = "WHERE F.BLOCK_ID = :block_id"
        binds = {'block_id': block_id}
        sql = "{sql} {wheresql}".format(sql=self.sql, wheresql=wheresql)
    elif block_name:
        joins = "JOIN {owner}BLOCKS B on B.BLOCK_ID = F.BLOCK_ID".format(owner=self.owner)
        wheresql = "WHERE B.BLOCK_NAME = :block_name"
        binds = {'block_name': block_name}
        sql = "{sql} {joins} {wheresql}".format(sql=self.sql, joins=joins, wheresql=wheresql)
    else:
        return {}
    cursors = self.dbi.processData(sql, binds, conn, transaction=transaction, returnCursor=True)
    result = self.formatCursor(cursors[0])
    return result
def execute(self, conn, dsType="", dataset="", transaction=False):
    """
    Lists all primary dataset types if no user input is provided.
    """
    sql = self.sql
    binds = {}
    if not dsType and not dataset:
        pass
    elif dsType and dataset in ("", None, '%'):
        op = ("=", "like")["%" in dsType]
        sql += "WHERE PDT.PRIMARY_DS_TYPE %s :primdstype" % op
        binds = {"primdstype": dsType}
    elif dataset and dsType in ("", None, '%'):
        op = ("=", "like")["%" in dataset]
        sql += "JOIN %sPRIMARY_DATASETS PDS on PDS.PRIMARY_DS_TYPE_ID = PDT.PRIMARY_DS_TYPE_ID \
                JOIN %sDATASETS DS ON DS.PRIMARY_DS_ID = PDS.PRIMARY_DS_ID \
                WHERE DS.DATASET %s :dataset" % (self.owner, self.owner, op)
        binds = {"dataset": dataset}
    elif dataset and dsType:
        op = ("=", "like")["%" in dsType]
        op1 = ("=", "like")["%" in dataset]
        sql += "JOIN %sPRIMARY_DATASETS PDS on PDS.PRIMARY_DS_TYPE_ID = PDT.PRIMARY_DS_TYPE_ID \
                JOIN %sDATASETS DS ON DS.PRIMARY_DS_ID = PDS.PRIMARY_DS_ID \
                WHERE DS.DATASET %s :dataset and PDT.PRIMARY_DS_TYPE %s :primdstype" \
                % (self.owner, self.owner, op1, op)
        binds = {"primdstype": dsType, "dataset": dataset}
    else:
        dbsExceptionHandler('dbsException-invalid-input',
                            "DAO Primary_DS_TYPE List accepts no input, or dataset, primary_ds_type as input.",
                            self.logger.exception)
    cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
    result = []
    for c in cursors:
        result.extend(self.formatCursor(c, size=100))
    return result
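The op = ("=", "like")["%" in dsType] lines above index a two-element tuple with a boolean: False (no wildcard) picks "=", True picks "like". A small standalone illustration of the idiom, with made-up sample patterns:

# Illustrative only: pick the SQL comparison operator based on wildcard presence.
def pick_operator(pattern):
    # bool is a subclass of int, so ("=", "like")[False] is "=" and ("=", "like")[True] is "like"
    return ("=", "like")["%" in pattern]

assert pick_operator("MinBias") == "="
assert pick_operator("MinBias%") == "like"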
def insertDataTier(self, businput):
    """
    Input dictionary has to have the following keys:
    data_tier_name, creation_date, create_by
    it builds the correct dictionary for dao input and executes the dao
    """
    conn = self.dbi.connection()
    tran = conn.begin()
    try:
        businput["data_tier_id"] = self.sm.increment(conn, "SEQ_DT")
        businput["data_tier_name"] = businput["data_tier_name"].upper()
        self.dtin.execute(conn, businput, tran)
        tran.commit()
        tran = None
    except KeyError as ke:
        dbsExceptionHandler('dbsException-invalid-input', "Invalid input:" + ke.args[0])
    except Exception as ex:
        if str(ex).lower().find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
            # already exist
            self.logger.warning("Unique constraint violation being ignored...")
            self.logger.warning("%s" % ex)
        else:
            if tran:
                tran.rollback()
            raise
    finally:
        if tran:
            tran.rollback()
        if conn:
            conn.close()
def listFileParents(self, logical_file_name="", block_id=0, block_name=""):
    """
    required parameter: logical_file_name or block_name
    returns: this_logical_file_name, parent_logical_file_name, parent_file_id
    """
    conn = self.dbi.connection()
    try:
        #self.logger.debug("lfn %s, block_name %s, block_id :%s" % (logical_file_name, block_name, block_id))
        if not logical_file_name and not block_name and not block_id:
            dbsExceptionHandler('dbsException-invalid-input',
                                "Logical_file_name, block_id or block_name is required for fileparents api")
        sqlresult = self.fileparentlist.execute(conn, logical_file_name, block_id, block_name)
        result = []
        d = {}
        #self.logger.debug(sqlresult)
        for i in range(len(sqlresult)):
            k = sqlresult[i]['this_logical_file_name']
            v = sqlresult[i]['parent_logical_file_name']
            if k in d:
                d[k].append(v)
            else:
                d[k] = [v]
        for k, v in d.iteritems():
            r = {'logical_file_name': k, 'parent_logical_file_name': v}
            result.append(r)
        return result
    finally:
        if conn:
            conn.close()
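The loop above folds the per-row (child, parent) pairs into one entry per child LFN. A hedged sketch of the same grouping with collections.defaultdict; the row keys mirror the DAO output above, but the sample rows are invented:

# Sketch: group parent LFNs per child LFN, as the API above does by hand.
from collections import defaultdict

rows = [  # made-up sample rows shaped like the DAO result
    {'this_logical_file_name': '/store/a.root', 'parent_logical_file_name': '/store/p1.root'},
    {'this_logical_file_name': '/store/a.root', 'parent_logical_file_name': '/store/p2.root'},
]
grouped = defaultdict(list)
for row in rows:
    grouped[row['this_logical_file_name']].append(row['parent_logical_file_name'])
result = [{'logical_file_name': k, 'parent_logical_file_name': v} for k, v in grouped.items()]
print(result)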
def execute(self, conn, daoinput, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-failed-connect2host",
                            "Oracle/BlockParent/Insert. Expects db connection from upper layer.",
                            self.logger.exception)
    binds = {}
    bindlist = []
    for f in daoinput["child_parent_id_list"]:
        binds = {"parent_file_id": f[1]}
        bindlist.append(binds)
    r = self.dbi.processData(self.sql2, bindlist, conn, False)
    p_bk_id = self.format(r)
    c_block_name = daoinput["block_name"]
    for b in p_bk_id:
        try:
            binds = {"block_name": c_block_name, "parent_block_id": b[0]}
            self.dbi.processData(self.sql, binds, conn, transaction)
        except SQLAlchemyIntegrityError as ex:
            if (str(ex).find("ORA-00001") != -1
                    and (str(ex).find("PK_BP") != -1 or str(ex).lower().find("duplicate") != -1)):
                pass
            elif str(ex).find("ORA-01400") != -1:
                raise
            else:
                raise
def listFileSummary(self, block_name="", dataset="", run_num=-1, validFileOnly=0):
    """
    required parameter: full block_name or dataset name. No wildcards allowed.
    run_num is optional.
    """
    conn = self.dbi.connection()
    try:
        if not block_name and not dataset:
            msg = "Block_name or dataset is required for listFileSummary API"
            dbsExceptionHandler('dbsException-invalid-input', msg)
        if '%' in block_name or '*' in block_name or '%' in dataset or '*' in dataset:
            msg = "No wildcard is allowed in block_name or dataset for filesummaries API"
            dbsExceptionHandler('dbsException-invalid-input', msg)
        result = self.filesummarylist.execute(conn, block_name, dataset, run_num,
                                              validFileOnly=validFileOnly)
        if len(result) == 1:
            if result[0]['num_file'] == 0 and result[0]['num_block'] == 0 \
                    and result[0]['num_event'] == 0 and result[0]['file_size'] == 0:
                result = []
        return result
    finally:
        if conn:
            conn.close()
def execute(self, associated_filesObj, conn, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/AssociatedFile/Insert. Expects db connection from upper layer.")
    binds = self.getBinds(associated_filesObj)
    result = self.dbi.processData(self.sql, binds, conn, transaction)
    return
def insertAcquisitionEra(self, businput):
    """
    Input dictionary has to have the following keys:
    acquisition_era_name, creation_date, create_by, start_date, end_date.
    it builds the correct dictionary for dao input and executes the dao
    """
    conn = self.dbi.connection()
    tran = conn.begin()
    try:
        businput["acquisition_era_id"] = self.sm.increment(conn, "SEQ_AQE", tran)
        businput["acquisition_era_name"] = businput["acquisition_era_name"]
        #self.logger.warning(businput)
        self.acqin.execute(conn, businput, tran)
        tran.commit()
        tran = None
    except KeyError as ke:
        dbsExceptionHandler('dbsException-invalid-input', "Invalid input:" + ke.args[0])
    except Exception as ex:
        if str(ex).lower().find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
            dbsExceptionHandler('dbsException-invalid-input2',
                                "Invalid input: acquisition_era_name already exists in DB",
                                serverError="%s" % ex)
        else:
            raise
    finally:
        if tran:
            tran.rollback()
        if conn:
            conn.close()
def listFileChildren(self, logical_file_name='', block_name='', block_id=0):
    """
    required parameter: logical_file_name or block_name or block_id
    returns: logical_file_name, child_logical_file_name, parent_file_id
    """
    conn = self.dbi.connection()
    try:
        if not logical_file_name and not block_name and not block_id:
            dbsExceptionHandler('dbsException-invalid-input',
                                "Logical_file_name, block_id or block_name is required for listFileChildren api")
        sqlresult = self.filechildlist.execute(conn, logical_file_name, block_name, block_id)
        d = {}
        result = []
        for i in range(len(sqlresult)):
            k = sqlresult[i]['logical_file_name']
            v = sqlresult[i]['child_logical_file_name']
            if k in d:
                d[k].append(v)
            else:
                d[k] = [v]
        for k, v in d.iteritems():
            r = {'logical_file_name': k, 'child_logical_file_name': v}
            result.append(r)
        return result
    finally:
        if conn:
            conn.close()
def insertProcessingEra(self, businput):
    """
    Input dictionary has to have the following keys:
    processing_version, creation_date, create_by, description
    it builds the correct dictionary for dao input and executes the dao
    """
    conn = self.dbi.connection()
    tran = conn.begin()
    try:
        businput["processing_era_id"] = self.sm.increment(conn, "SEQ_PE", tran)
        businput["processing_version"] = businput["processing_version"]
        self.pein.execute(conn, businput, tran)
        tran.commit()
        tran = None
    except KeyError as ke:
        dbsExceptionHandler('dbsException-invalid-input', "Invalid input:" + ke.args[0])
    except Exception as ex:
        if (str(ex).lower().find("unique constraint") != -1
                or str(ex).lower().find("duplicate") != -1):
            # already exist
            self.logger.warning("DBSProcessingEra/insertProcessingEras. "
                                "Unique constraint violation being ignored...")
            self.logger.warning(ex)
        else:
            if tran:
                tran.rollback()
                tran = None
            raise
    finally:
        if tran:
            tran.rollback()
        if conn:
            conn.close()
def execute(self, conn, binds, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/ProcessingEra/Insert. Expects db connection from upper layer.")
    self.executeSingle(conn, binds, "PROCESSING_ERAS", transaction)
    return
def listPhysicsGroups(self, physics_group_name=""):
    """
    Returns all physics groups if physics group names are not passed.
    """
    if not isinstance(physics_group_name, basestring):
        dbsExceptionHandler('dbsException-invalid-input',
                            'physics group name given is not valid : %s' % physics_group_name)
    else:
        try:
            physics_group_name = str(physics_group_name)
        except:
            dbsExceptionHandler('dbsException-invalid-input',
                                'physics group name given is not valid : %s' % physics_group_name)
    conn = self.dbi.connection()
    try:
        result = self.pglist.execute(conn, physics_group_name)
        return result
    finally:
        if conn:
            conn.close()
def execute(self, conn, daoinput, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/FileLumi/Insert. Expects db connection from upper layer.")
    self.dbi.processData(self.sql, daoinput, conn, transaction)
def execute(self, conn, primary_ds_name="", primary_ds_type="", transaction=False):
    """
    Lists all primary datasets if pattern is not provided.
    """
    if not conn:
        dbsExceptionHandler('dbsException-db-conn-failed',
                            "PrimaryDataset/List expects db connection from upper layer.")
    sql = self.sql
    binds = {}
    #import pdb
    #pdb.set_trace()
    if primary_ds_name and primary_ds_type in ('', None, '%'):
        op = ("=", "like")["%" in primary_ds_name]
        sql += "WHERE P.PRIMARY_DS_NAME %s :primary_ds_name" % op
        binds.update(primary_ds_name=primary_ds_name)
    elif primary_ds_type and primary_ds_name in ('', None, '%'):
        op = ("=", "like")["%" in primary_ds_type]
        sql += "WHERE PT.PRIMARY_DS_TYPE %s :primary_ds_type" % op
        binds.update(primary_ds_type=primary_ds_type)
    elif primary_ds_name and primary_ds_type:
        op = ("=", "like")["%" in primary_ds_name]
        op1 = ("=", "like")["%" in primary_ds_type]
        sql += "WHERE P.PRIMARY_DS_NAME %s :primary_ds_name and PT.PRIMARY_DS_TYPE %s :primary_ds_type" \
               % (op, op1)
        binds.update(primary_ds_name=primary_ds_name)
        binds.update(primary_ds_type=primary_ds_type)
    else:
        pass
    cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
    if len(cursors) == 0:
        return []
    else:
        return self.formatCursor(cursors[0])
def execute(self, conn, run_num=-1, logical_file_name="", block_name="", dataset="", trans=False):
    """
    Lists run numbers for the given logical_file_name, block_name or dataset.
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/DatasetRun/List. Expects db connection from upper layer.")
    sql = self.sql
    binds = {}
    if logical_file_name and "%" not in logical_file_name:
        sql += """ inner join %sFILES FILES on FILES.FILE_ID = FL.FILE_ID
                   WHERE FILES.LOGICAL_FILE_NAME = :logical_file_name""" % (self.owner)
        binds["logical_file_name"] = logical_file_name
    elif block_name and "%" not in block_name:
        sql += """ inner join %sFILES FILES on FILES.FILE_ID = FL.FILE_ID
                   inner join %sBLOCKS BLOCKS on BLOCKS.BLOCK_ID = FILES.BLOCK_ID
                   WHERE BLOCKS.BLOCK_NAME = :block_name """ % (self.owner, self.owner)
        binds["block_name"] = block_name
    elif dataset and "%" not in dataset:
        sql += """ inner join %sFILES FILES on FILES.FILE_ID = FL.FILE_ID
                   inner join %sDATASETS DATASETS on DATASETS.DATASET_ID = FILES.DATASET_ID
                   WHERE DATASETS.DATASET = :dataset """ % (self.owner, self.owner)
        binds["dataset"] = dataset
    else:
        pass
    if run_num != -1:
        andorwhere = ("WHERE", "AND")["WHERE" in sql]
        run_list = []
        wheresql_run_list = ''
        wheresql_run_range = ''
        #
        for r in parseRunRange(run_num):
            if isinstance(r, str) or isinstance(r, int) or isinstance(r, long):
                run_list.append(str(r))
            if isinstance(r, run_tuple):
                if r[0] == r[1]:
                    dbsExceptionHandler('dbsException-invalid-input',
                                        "DBS run_num range must be apart at least by 1.")
                wheresql_run_range = " FL.RUN_NUM between :minrun and :maxrun "
                binds.update({"minrun": r[0]})
                binds.update({"maxrun": r[1]})
        #
        if run_list:
            wheresql_run_list = " fl.RUN_NUM in (SELECT TOKEN FROM TOKEN_GENERATOR) "
            run_generator, run_binds = create_token_generator(run_list)
            sql = "{run_generator}".format(run_generator=run_generator) + sql
            binds.update(run_binds)
        if wheresql_run_range and wheresql_run_list:
            sql += " %s (" % andorwhere + wheresql_run_range + " or " + wheresql_run_list + " )"
        elif wheresql_run_range and not wheresql_run_list:
            sql += " %s " % andorwhere + wheresql_run_range
        elif not wheresql_run_range and wheresql_run_list:
            sql += " %s " % andorwhere + wheresql_run_list
    #self.logger.debug(sql)
    cursors = self.dbi.processData(sql, binds, conn, transaction=trans, returnCursor=True)
    result = []
    for i in range(len(cursors)):
        result.extend(self.formatCursor(cursors[i]))
    return result
def submit(self):
    """
    Interface for submitting a migration request.
    Required input keys:
    MIGRATION_URL: The source DBS url for migration.
    MIGRATION_INPUT: The block or dataset names to be migrated.
    """
    body = request.body.read()
    indata = cjson.decode(body)
    try:
        indata = validateJSONInputNoCopy("migration_rqst", indata)
        indata.update({
            "creation_date": dbsUtils().getTime(),
            "last_modification_date": dbsUtils().getTime(),
            "create_by": dbsUtils().getCreateBy(),
            "last_modified_by": dbsUtils().getCreateBy(),
            "migration_status": 0})
        return self.dbsMigrate.insertMigrationRequest(indata)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSMigrateModel/submit. %s\n Exception trace: \n %s." \
                 % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
def execute(self, conn, dataset, transaction=False):
    """
    dataset is a required parameter
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/DatasetParent/List. Expects db connection from upper layer.")
    sql = self.sql
    sql += "WHERE D.DATASET = :dataset"
    binds = {"dataset": dataset}
    #self.logger.error(sql)
    #self.logger.error("binds=%s" % binds)
    cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
    #assert len(cursors) == 1, "Dataset parent does not exist"
    result = []
    for c in cursors:
        result.extend(self.formatCursor(c))
    return result
def insertOutputModuleConfig(self, remoteConfig, migration=False):
    """
    Insert Release version, application, parameter set hashes and the map (output module config).
    """
    otptIdList = []
    missingList = []
    conn = self.dbi.connection()
    try:
        for c in remoteConfig:
            cfgid = self.otptModCfgid.execute(conn, app=c["app_name"],
                                              release_version=c["release_version"],
                                              pset_hash=c["pset_hash"],
                                              output_label=c["output_module_label"],
                                              global_tag=c['global_tag'])
            if cfgid <= 0:
                missingList.append(c)
            else:
                key = (c['app_name'] + ':' + c['release_version'] + ':' +
                       c['pset_hash'] + ':' + c['output_module_label'] + ':' +
                       c['global_tag'])
                self.datasetCache['conf'][key] = cfgid
                otptIdList.append(cfgid)
                #print "About to set cfgid: %s" % str(cfgid)
    except KeyError as ex:
        if conn:
            conn.close()
        dbsExceptionHandler("dbsException-invalid-input2",
                            "DBSBlockInsert/insertOutputModuleConfig: KeyError exception: %s. " % ex.args[0])
def listDatasetAccessTypes(self, dataset_access_type=""):
    """
    List dataset access types
    """
    if isinstance(dataset_access_type, basestring):
        try:
            dataset_access_type = str(dataset_access_type)
        except:
            dbsExceptionHandler("dbsException-invalid-input",
                                "dataset_access_type given is not valid : %s" % dataset_access_type)
    else:
        dbsExceptionHandler("dbsException-invalid-input",
                            "dataset_access_type given is not valid : %s" % dataset_access_type)
    conn = self.dbi.connection()
    try:
        plist = self.datasetAccessType.execute(conn, dataset_access_type.upper())
        result = [{}]
        if plist:
            t = []
            for i in plist:
                for k, v in i.iteritems():
                    t.append(v)
            result[0]["dataset_access_type"] = t
        return result
    finally:
        if conn:
            conn.close()
def listReleaseVersions(self, release_version="", dataset='', logical_file_name=''):
    """
    List release versions
    """
    if dataset and ('%' in dataset or '*' in dataset):
        dbsExceptionHandler('dbsException-invalid-input',
                            " DBSReleaseVersion/listReleaseVersions. No wildcards are" +
                            " allowed in dataset.\n.")
    if logical_file_name and ('%' in logical_file_name or '*' in logical_file_name):
        dbsExceptionHandler('dbsException-invalid-input',
                            " DBSReleaseVersion/listReleaseVersions. No wildcards are" +
                            " allowed in logical_file_name.\n.")
    conn = self.dbi.connection()
    try:
        plist = self.releaseVersion.execute(conn, release_version.upper(), dataset, logical_file_name)
        result = [{}]
        if plist:
            t = []
            for i in plist:
                for k, v in i.iteritems():
                    t.append(v)
            result[0]['release_version'] = t
        return result
    finally:
        if conn:
            conn.close()
def validateStringInput(input_key, input_data, read=False):
    """
    To check if a string has the required format. This is only used for POST APIs.
    """
    log = clog.error_log
    func = None
    if '*' in input_data or '%' in input_data:
        func = validationFunctionWildcard.get(input_key)
        if func is None:
            func = searchstr
    elif input_key == 'migration_input':
        if input_data.find('#') != -1:
            func = block
        else:
            func = dataset
    else:
        if not read:
            func = validationFunction.get(input_key)
            if func is None:
                func = namestr
        else:
            if input_key == 'dataset':
                func = reading_dataset_check
            elif input_key == 'block_name':
                func = reading_block_check
            elif input_key == 'logical_file_name':
                func = reading_lfn_check
            else:
                func = namestr
    try:
        func(input_data)
    except AssertionError as ae:
        serverLog = str(ae) + " key-value pair (%s, %s) cannot pass input checking" % (input_key, input_data)
        #print serverLog
        dbsExceptionHandler("dbsException-invalid-input2",
                            message="Invalid Input Data %s...: Not Match Required Format" % input_data[:10],
                            logger=log.error, serverError=serverLog)
    return input_data
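validateStringInput picks a validator from a lookup table keyed by the input name and falls back to a generic check when no specific validator exists. A minimal, self-contained sketch of that dispatch pattern follows; the validator names and regexes here are placeholders, not the real DBS validation rules:

# Sketch of table-driven validation; the regexes are illustrative placeholders.
import re

def _dataset_check(value):
    assert re.match(r"^/[\w\-]+/[\w\-]+/[\w\-]+$", value), "bad dataset name"

def _namestr_check(value):
    assert re.match(r"^[\w\-/#.]+$", value), "bad name string"

validators = {"dataset": _dataset_check}

def validate(input_key, input_data):
    # look up a key-specific validator, fall back to the generic one
    func = validators.get(input_key, _namestr_check)
    func(input_data)
    return input_data

print(validate("dataset", "/PrimDS/ProcDS/TIER"))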
def insertDataTier(self, businput):
    """
    Input dictionary has to have the following keys:
    data_tier_name, creation_date, create_by
    it builds the correct dictionary for dao input and executes the dao
    """
    conn = self.dbi.connection()
    tran = conn.begin()
    try:
        businput["data_tier_id"] = self.sm.increment(conn, "SEQ_DT")
        businput["data_tier_name"] = businput["data_tier_name"].upper()
        self.dtin.execute(conn, businput, tran)
        tran.commit()
        tran = None
    except KeyError as ke:
        dbsExceptionHandler('dbsException-invalid-input', "Invalid input:" + ke.args[0])
    except Exception as ex:
        if str(ex).lower().find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
            # already exist
            self.logger.warning("Unique constraint violation being ignored...")
            self.logger.warning("%s" % ex)
        else:
            if tran:
                tran.rollback()
            raise
    finally:
        if tran:
            tran.rollback()
        if conn:
            conn.close()
def updateType(self, dataset, dataset_access_type):
    """
    Used to change the status of a dataset type (production/etc.)
    """
    if dataset == "":
        dbsExceptionHandler("dbsException-invalid-input",
                            "DBSDataset/updateType. dataset is required.")
    conn = self.dbi.connection()
    trans = conn.begin()
    try:
        self.updatetype.execute(conn, dataset, dataset_access_type.upper(), trans)
        trans.commit()
        trans = None
    except SQLAlchemyDatabaseError as ex:
        if str(ex).find("ORA-01407") != -1:
            dbsExceptionHandler("dbsException-invalid-input2", "Invalid Input", None,
                                "DBSDataset/updateType. A Valid dataset_access_type is required.")
    finally:
        if trans:
            trans.rollback()
        if conn:
            conn.close()
def listFileSummary(self, block_name="", dataset="", run_num=-1, validFileOnly=0):
    """
    required parameter: full block_name or dataset name. No wildcards allowed.
    run_num is optional.
    """
    if not block_name and not dataset:
        msg = "Block_name or dataset is required for listFileSummary API"
        dbsExceptionHandler('dbsException-invalid-input', msg, self.logger.exception)
    if '%' in block_name or '*' in block_name or '%' in dataset or '*' in dataset:
        msg = "No wildcard is allowed in block_name or dataset for filesummaries API"
        dbsExceptionHandler('dbsException-invalid-input', msg, self.logger.exception)
    #
    with self.dbi.connection() as conn:
        for item in self.filesummarylist.execute(conn, block_name, dataset, run_num,
                                                 validFileOnly=validFileOnly):
            if item['num_file'] == 0 and item['num_block'] == 0 \
                    and item['num_event'] == 0 and item['file_size'] == 0:
                yield
            else:
                yield item
def execute(self, conn, daoinput, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/File/Insert. Expects db connection from upper layer.")
    #print "About to insert file with dataset id"
    #print binds[0]['dataset_id']
    self.dbi.processData(self.sql, daoinput, conn, transaction)
def execute(self, conn, daoinput, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "PrimaryDataset/Insert expects db connection from upper layer.")
    self.executeSingle(conn, daoinput, "PRIMARY_DATASETS", transaction)
def insertAcquisitionEra(self, businput):
    """
    Input dictionary has to have the following keys:
    acquisition_era_name, creation_date, create_by, start_date, end_date.
    it builds the correct dictionary for dao input and executes the dao
    """
    conn = self.dbi.connection()
    tran = conn.begin()
    try:
        businput["acquisition_era_id"] = self.sm.increment(conn, "SEQ_AQE", tran)
        businput["acquisition_era_name"] = businput["acquisition_era_name"]
        #self.logger.warning(businput)
        self.acqin.execute(conn, businput, tran)
        tran.commit()
        tran = None
    except KeyError as ke:
        dbsExceptionHandler('dbsException-invalid-input', "Invalid input:" + ke.args[0])
    except Exception as ex:
        if str(ex).lower().find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
            dbsExceptionHandler('dbsException-invalid-input2',
                                "Invalid input: acquisition_era_name already exists in DB",
                                serverError="%s" % ex)
        else:
            raise
    finally:
        if tran:
            tran.rollback()
        if conn:
            conn.close()
def processDatasetBlocks(self, url, conn, inputdataset, order_counter):
    """
    Utility function that compares blocks of a dataset at the source and the destination
    and returns an ordered list of blocks not already at the destination for migration.
    """
    ordered_dict = {}
    srcblks = self.getSrcBlocks(url, dataset=inputdataset)
    if len(srcblks) == 0:
        e = "DBSMigration: No blocks in the required dataset %s found at source %s." % (inputdataset, url)
        dbsExceptionHandler('dbsException-invalid-input2', e, self.logger.exception, e)
    dstblks = self.blocklist.execute(conn, dataset=inputdataset)
    self.logger.debug("******* dstblks for dataset %s ***********" % inputdataset)
    self.logger.debug(dstblks)
    blocksInSrcNames = [y['block_name'] for y in srcblks]
    blocksInDstNames = []
    for item in dstblks:
        blocksInDstNames.append(item['block_name'])
    ordered_dict[order_counter] = []
    for ablk in blocksInSrcNames:
        if not ablk in blocksInDstNames:
            ordered_dict[order_counter].append(ablk)
    if ordered_dict[order_counter] != []:
        self.logger.debug("**** ordered_dict dict length ****")
        self.logger.debug(len(ordered_dict))
        return ordered_dict
    else:
        return {}
def execute(self, conn, origin_site_name="", dataset="", block_name="", transaction=False):
    """
    origin_site_name: T1_US_FNAL_Buffer
    dataset: /a/b/c
    block_name: /a/b/c#d
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/Block/List. Expects db connection from upper layer.",
                            self.logger.exception)
    binds = {}
    if origin_site_name:
        wheresql = 'WHERE B.ORIGIN_SITE_NAME = :origin_site_name'
        binds.update(origin_site_name=origin_site_name)
    if dataset:
        if 'wheresql' in locals():
            wheresql += ' AND DS.DATASET = :dataset'
        else:
            wheresql = 'WHERE DS.DATASET = :dataset'
        binds.update(dataset=dataset)
    if block_name:
        if 'wheresql' in locals():
            wheresql += ' AND B.BLOCK_NAME = :block_name'
        else:
            wheresql = 'WHERE B.BLOCK_NAME = :block_name'
        binds.update(block_name=block_name)
    sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql)
    cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
    result = []
    for cursor in cursors:
        result.extend(self.formatCursor(cursor))
    return result
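The 'wheresql' in locals() checks above decide whether a condition starts the WHERE clause or is appended with AND. A sketch of an equivalent approach that collects the optional conditions in a list and joins them once; this is illustrative plain Python, not the DAO's actual code:

# Sketch: collect optional filter conditions, then join them in one step.
def build_where(origin_site_name="", dataset="", block_name=""):
    conditions, binds = [], {}
    if origin_site_name:
        conditions.append("B.ORIGIN_SITE_NAME = :origin_site_name")
        binds["origin_site_name"] = origin_site_name
    if dataset:
        conditions.append("DS.DATASET = :dataset")
        binds["dataset"] = dataset
    if block_name:
        conditions.append("B.BLOCK_NAME = :block_name")
        binds["block_name"] = block_name
    wheresql = "WHERE " + " AND ".join(conditions) if conditions else ""
    return wheresql, binds

# Example: ('WHERE DS.DATASET = :dataset AND B.BLOCK_NAME = :block_name', {...})
print(build_where(dataset="/a/b/c", block_name="/a/b/c#d"))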
def listDatasetArray(self, inputdata=None):
    if not inputdata:
        dbsExceptionHandler('dbsException-invalid-input',
                            'DBSDataset/listDatasetArray API requires at least a list of dataset or dataset_id.')
    else:
        dataset = None
        dataset_id = -1
        #self.logger.error("******input data ******")
        #self.logger.error(inputdata)
        if "dataset" in inputdata:
            dataset = inputdata["dataset"]
        elif "dataset_id" in inputdata:
            dataset_id = inputdata["dataset_id"]
        else:
            dbsExceptionHandler('dbsException-invalid-input2', "Invalid input", None,
                                "business/listDatasetArray requires at least a list of dataset or dataset_id")
        is_dataset_valid = inputdata.get("is_dataset_valid", 1)
        dataset_access_type = inputdata.get("dataset_access_type", None)
        detail = inputdata.get("detail", False)
        dao = (self.datasetbrieflist, self.datasetlist)[detail]
        with self.dbi.connection() as conn:
            result = dao.execute(conn, dataset=dataset,
                                 is_dataset_valid=is_dataset_valid,
                                 dataset_access_type=dataset_access_type,
                                 dataset_id=dataset_id,
                                 transaction=False)
            for r in result:
                yield r
def listBlocksOrigin(self, origin_site_name="", dataset="", block_name=""):
    """
    This is the API to list all the blocks/datasets first generated in the site called
    origin_site_name, if origin_site_name is provided, with no wildcards allowed.
    If a fully spelled dataset is provided, then it will only list the blocks first
    generated from origin_site_name under the given dataset.
    """
    if not (dataset or block_name):
        dbsExceptionHandler("dbsException-invalid-input",
                            "DBSBlock/listBlocksOrigin: dataset or block_name must be provided.")
    if re.search("[%*]", dataset) or re.search("[%*]", block_name):
        dbsExceptionHandler("dbsException-invalid-input",
                            "DBSBlock/listBlocksOrigin: dataset or block_name with wildcard is not supported.")
    conn = self.dbi.connection()
    try:
        result = self.bkOriginlist.execute(conn, origin_site_name, dataset, block_name)
        return result
    finally:
        if conn:
            conn.close()
def execute(self, conn, logical_file_name, is_file_valid, lost, transaction=False):
    """
    for a given file or a list of files
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/File/UpdateStatus. Expects db connection from upper layer.")
    binds = dict(myuser=dbsUtils().getCreateBy(), mydate=dbsUtils().getTime(),
                 is_file_valid=is_file_valid)
    if isinstance(logical_file_name, list):
        lfn_generator, lfn_binds = create_token_generator(logical_file_name)
        ### with clause - subquery factoring only works with select statements, therefore lfn_generator
        ### has to be placed in front of the SELECT statement in the WHERE clause
        ### http://asktom.oracle.com/pls/asktom/f?p=100:11:::::P11_QUESTION_ID:8120272301765
        wheresql = """WHERE F.LOGICAL_FILE_NAME in ({lfn_generator} SELECT TOKEN FROM TOKEN_GENERATOR) """.format(
            lfn_generator=lfn_generator)
        binds.update(lfn_binds)
    else:
        wheresql = "where F.LOGICAL_FILE_NAME=:logical_file_name"
        binds.update(logical_file_name=logical_file_name)
    if lost:
        sql = "{sql}, file_size=0 {wheresql}".format(sql=self.sql, wheresql=wheresql)
    else:
        sql = "{sql} {wheresql}".format(sql=self.sql, wheresql=wheresql)
    self.dbi.processData(sql, binds, conn, transaction)
def processDatasetBlocks(self, url, conn, inputdataset, order_counter):
    """
    Utility function that compares blocks of a dataset at the source and the destination
    and returns an ordered list of blocks not already at the destination for migration.
    """
    ordered_dict = {}
    srcblks = self.getSrcBlocks(url, dataset=inputdataset)
    if len(srcblks) == 0:
        e = "DBSMigration: No blocks in the required dataset %s found at source %s." % (inputdataset, url)
        dbsExceptionHandler('dbsException-invalid-input2', e, self.logger.exception, e)
    dstblks = self.blocklist.execute(conn, dataset=inputdataset)
    self.logger.debug("******* dstblks for dataset %s ***********" % inputdataset)
    self.logger.debug(dstblks)
    blocksInSrcNames = [y['block_name'] for y in srcblks]
    blocksInDstNames = []
    for item in dstblks:
        blocksInDstNames.append(item['block_name'])
    ordered_dict[order_counter] = []
    for ablk in blocksInSrcNames:
        if not ablk in blocksInDstNames:
            ordered_dict[order_counter].append(ablk)
    if ordered_dict[order_counter] != []:
        self.logger.debug("**** ordered_dict dict length ****")
        self.logger.debug(len(ordered_dict))
        return ordered_dict
    else:
        return {}
def execute(self, conn, daoinput, transaction=False):
    """
    daoinput must be validated to have the following keys:
    this_file_id, parent_logical_file_name
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/FileParent/Insert. Expects db connection from upper layer.")
    binds = {}
    bindlist = []
    if isinstance(daoinput, dict):
        self.dbi.processData(self.sql, daoinput, conn, transaction)
    elif isinstance(daoinput, list):
        for pf in daoinput:
            binds = {"this_file_id": pf["this_file_id"],
                     "parent_logical_file_name": pf["parent_logical_file_name"]}
            bindlist.append(binds)
        self.dbi.processData(self.sql, bindlist, conn, transaction)
    else:
        dbsExceptionHandler('dbsException-invalid-input2',
                            "file id and parent lfn are required for FileParent insert dao.")
def execute(self, conn, block_name="", transaction=False):
    """
    block: /a/b/c#d
    """
    if not conn:
        msg = 'Oracle/BlockParent/List. No DB connection found'
        dbsExceptionHandler('dbsException-db-conn-failed', msg)
    sql = self.sql
    binds = {}
    if block_name:
        binds.update(block_name=block_name)
    else:
        dbsExceptionHandler("dbsException-invalid-input",
                            "Oracle/BlockParent/ListChild. block_name must be provided.")
    cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
    result = []
    for c in cursors:
        result.extend(self.formatCursor(c))
    return result
def execute(self, conn, logical_file_name, block_name, block_id, transaction=False):
    """
    Lists child files for the given logical_file_name, block_name or block_id.
    """
    binds = {}
    sql = ''
    if logical_file_name:
        if isinstance(logical_file_name, basestring):
            wheresql = "WHERE F.LOGICAL_FILE_NAME = :logical_file_name"
            binds = {"logical_file_name": logical_file_name}
            sql = "{sql} {wheresql}".format(sql=self.sql, wheresql=wheresql)
        elif isinstance(logical_file_name, list):
            wheresql = "WHERE F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR)"
            lfn_generator, binds = create_token_generator(logical_file_name)
            sql = "{lfn_generator} {sql} {wheresql}".format(lfn_generator=lfn_generator,
                                                            sql=self.sql, wheresql=wheresql)
    elif block_name:
        joins = "JOIN {owner}BLOCKS B on B.BLOCK_ID = F.BLOCK_ID".format(owner=self.owner)
        wheresql = "WHERE B.BLOCK_NAME = :block_name"
        binds = {"block_name": block_name}
        sql = "{sql} {joins} {wheresql}".format(sql=self.sql, joins=joins, wheresql=wheresql)
    elif block_id:
        wheresql = "WHERE F.BLOCK_ID = :block_id"
        binds = {"block_id": block_id}
        sql = "{sql} {wheresql}".format(sql=self.sql, wheresql=wheresql)
    else:
        dbsExceptionHandler('dbsException-invalid-input',
                            "Logical_file_names is required for listChild dao.",
                            self.logger.exception)
    cursors = self.dbi.processData(sql, binds, conn, transaction=transaction, returnCursor=True)
    result = []
    for c in cursors:
        result.extend(self.formatCursor(c, size=100))
    return result
def listBlockParents(self, block_name=""):
    """
    list parents of a block
    """
    if not block_name:
        msg = (" DBSBlock/listBlockParents. Block_name must be provided as a string or a list. "
               "No wildcards allowed in block_name/s.")
        dbsExceptionHandler('dbsException-invalid-input', msg)
    elif isinstance(block_name, basestring):
        try:
            block_name = str(block_name)
            if '%' in block_name or '*' in block_name:
                dbsExceptionHandler("dbsException-invalid-input",
                                    "DBSReaderModel/listBlocksParents: NO WILDCARDS allowed in block_name.")
        except:
            dbsExceptionHandler("dbsException-invalid-input",
                                "DBSBlock/listBlockParents. Block_name must be provided as a string or a list. "
                                "No wildcards allowed in block_name/s.")
    elif type(block_name) is list:
        for b in block_name:
            if '%' in b or '*' in b:
                dbsExceptionHandler("dbsException-invalid-input",
                                    "DBSReaderModel/listBlocksParents: NO WILDCARDS allowed in block_name.")
    else:
        msg = ("DBSBlock/listBlockParents. Block_name must be provided as a string or a list. "
               "No wildcards allowed in block_name/s.")
        dbsExceptionHandler("dbsException-invalid-input", msg)
    conn = self.dbi.connection()
    try:
        results = self.blockparentlist.execute(conn, block_name)
        return results
    finally:
        if conn:
            conn.close()
def execute(self, conn, block_name="", transaction=False):
    """
    block: /a/b/c#d
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/BlockParent/List. Expects db connection from upper layer.")
    sql = self.sql
    if isinstance(block_name, basestring):
        binds = {'block_name': block_name}
    elif type(block_name) is list:
        binds = [{'block_name': x} for x in block_name]
    else:
        msg = "Oracle/BlockParent/List. Block_name must be provided either as a string or as a list."
        dbsExceptionHandler('dbsException-invalid-input', msg)
    cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
    result = []
    for i in cursors:
        d = self.formatCursor(i)
        if d:
            result += d
    return result
def listFileParents(self, logical_file_name="", block_id=0, block_name=""):
    """
    required parameter: logical_file_name or block_name
    returns: this_logical_file_name, parent_logical_file_name, parent_file_id
    """
    conn = self.dbi.connection()
    try:
        #self.logger.debug("lfn %s, block_name %s, block_id :%s" % (logical_file_name, block_name, block_id))
        if not logical_file_name and not block_name and not block_id:
            dbsExceptionHandler('dbsException-invalid-input',
                                "Logical_file_name, block_id or block_name is required for fileparents api")
        sqlresult = self.fileparentlist.execute(conn, logical_file_name, block_id, block_name)
        result = []
        d = {}
        #self.logger.debug(sqlresult)
        for i in range(len(sqlresult)):
            k = sqlresult[i]['this_logical_file_name']
            v = sqlresult[i]['parent_logical_file_name']
            if k in d:
                d[k].append(v)
            else:
                d[k] = [v]
        for k, v in d.iteritems():
            r = {'logical_file_name': k, 'parent_logical_file_name': v}
            result.append(r)
        return result
    finally:
        if conn:
            conn.close()
def execute(self, conn, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/DoNothing/List. Expects db connection from upper layer.")
    return []
def execute(self, conn, daoinput, transaction=False):
    """
    daoinput keys: migration_rqst_id
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/MigrationRequests/Remove. Expects db connection from upper layer.")
    daoinput['create_by'] = dbsUtils().getCreateBy()
    try:
        msg = ("DBSMigration: Invalid request. Successfully processed or processing requests cannot be removed, "
               "or the requested migration did not exist, or the requestor for removing and creating has to be "
               "the same user.")
        checkit = self.dbi.processData(self.select, daoinput, conn, transaction)
        if self.formatDict(checkit)[0]["count"] >= 1:
            reqID = {'migration_rqst_id': daoinput['migration_rqst_id']}
            result = self.dbi.processData(self.sql, reqID, conn, transaction)
        else:
            dbsExceptionHandler('dbsException-invalid-input', msg)
    except:
        raise
def validateStringInput(input_key, input_data):
    """
    To check if a string has the required format. This is only used for POST APIs.
    """
    log = clog.error_log
    func = None
    if '*' in input_data or '%' in input_data:
        func = validationFunctionWildcard.get(input_key)
        if func is None:
            func = searchstr
    elif input_key == 'migration_input':
        if input_data.find('#') != -1:
            func = block
        else:
            func = dataset
    else:
        func = validationFunction.get(input_key)
        if func is None:
            func = namestr
    try:
        func(input_data)
    except AssertionError as ae:
        serverLog = str(ae) + " key-value pair (%s, %s) cannot pass input checking" % (input_key, input_data)
        #print serverLog
        dbsExceptionHandler("dbsException-invalid-input2",
                            message="Invalid Input Data %s...: Not Match Required Format" % input_data[:10],
                            logger=log.error, serverError=serverLog)
    return input_data
def execute(self, conn, acquisition_era_name, end_date, transaction=False):
    """
    for a given acquisition_era_name
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "dbs/dao/Oracle/AcquisitionEra/updateEndDate expects db connection from upper layer.")
    binds = {"acquisition_era_name": acquisition_era_name, "end_date": end_date}
    result = self.dbi.processData(self.sql, binds, conn, transaction)
def execute(self, conn, dataset, is_dataset_valid, transaction=False):
    """
    for a given dataset
    """
    if not conn:
        dbsExceptionHandler("dbsException-db-conn-failed",
                            "Oracle/Dataset/UpdateStatus. Expects db connection from upper layer.")
    binds = {"dataset": dataset, "is_dataset_valid": is_dataset_valid,
             "mydate": dbsUtils().getTime(), "myuser": dbsUtils().getCreateBy()}
    result = self.dbi.processData(self.sql, binds, conn, transaction)
def execute(self, conn, dataset, dataset_access_type, transaction=False):
    """
    for a given dataset
    """
    if not conn:
        dbsExceptionHandler("dbsException-failed-connect2host",
                            "Oracle/Dataset/UpdateType. Expects db connection from upper layer.",
                            self.logger.exception)
    binds = {"dataset": dataset, "dataset_access_type": dataset_access_type,
             "myuser": dbsUtils().getCreateBy(), "mydate": dbsUtils().getTime()}
    result = self.dbi.processData(self.sql, binds, conn, transaction)