Example #1
import datetime

from CondDBFW import querying  # assumed import path; the later examples assume the same CondDBFW modules

def copy_global_tag(arguments):
	raise NotImplementedError("Copying Global Tags is currently not supported for this transition command-line interface for CondDBFW.")

	# set up connection
	source_connection = querying.connect(arguments.db, secrets=arguments.secrets, mode=arguments.mode, map_blobs=True)
	dest_connection = querying.connect(arguments.dest_db, secrets=arguments.secrets, mode=arguments.mode, map_blobs=True)

	# get CondDBFW Global Tag object
	global_tag = source_connection.global_tag(name=arguments.input_gt)
	if global_tag is None:
		raise Exception("Source Global Tag doesn't exist.")

	# collect the names of all Tags in the Global Tag, then fetch the Tag objects
	tag_names = global_tag.tags().get_members("tag_name").data()
	tags = source_connection.tag(name=tag_names)

	# copy global tag first
	global_tag.insertion_time = datetime.datetime.now()
	global_tag.validity = -1
	dest_connection.write_and_commit(global_tag)

	for tag in tags:
		# create a temporary argument object: copy the parent arguments first,
		# then set the Tag-specific fields so the copy does not clobber them
		class args(object):
			def __init__(self):
				for attribute in dir(arguments):
					self.__dict__[attribute] = getattr(arguments, attribute)
				self.input_tag = tag.name
				self.dest_tag = tag.name
				self.start = 1
				self.end = tag.latest_iov() + 1

		copy_tag(args())
Example #2
def copy_tag(arguments):

	# set up connection
	source_connection = querying.connect(arguments.db, secrets=arguments.secrets, mode=arguments.mode, map_blobs=True)
	dest_connection = querying.connect(arguments.dest_db, secrets=arguments.secrets, mode=arguments.mode, map_blobs=True)

	# get tag from the source database, adjust it, and copy it (with the defined IOV range) to the destination database

	print("Reading source Tag.")
	source_tag = source_connection.tag(name=arguments.input_tag)
	if source_tag is None:
		raise Exception("Source Tag doesn't exist.")

	# get all IOVs within the range [start, end]
	print("Reading source IOVs.")
	since_range = source_connection.range(arguments.start, arguments.end)
	source_iovs = source_tag.iovs(since=since_range).data()

	# get hashes of all IOVs contained in the Tag in the source database
	print("Reading source Payloads.")
	hashes = source_tag.iovs().get_members("payload_hash").data()
	payloads = source_connection.payload(hash=hashes)

	print("Writing to destination database...")

	# set end_of_validity to -1 because sqlite doesn't support long ints
	source_tag.end_of_validity = -1
	source_tag.name = arguments.dest_tag
	source_tag.modification_time = datetime.datetime.now()

	# create new iovs
	new_iovs = []
	for iov in source_iovs:
		new_iovs.append(dest_connection.models["iov"](iov.as_dicts(convert_timestamps=False), convert_timestamps=False))

	# write the new Tag to the destination database, if it is not already there
	print("Writing destination Tag.")
	if dest_connection.tag(name=arguments.dest_tag) is None:
		dest_connection.write_and_commit(source_tag)

	# write new iovs
	print("Writing IOVs to destination Tag.")
	for iov in new_iovs:
		if dest_connection.iov(tag_name=iov.tag_name, since=iov.since, insertion_time=iov.insertion_time) is None:
			dest_connection.write_and_commit(iov)

	# get payloads used by IOVs and copy those over
	print("Copying Payloads over.")
	for payload in payloads:
		if dest_connection.payload(hash=payload.hash) is None:
			dest_connection.write_and_commit(payload)

	print("Copy complete.")
Example #3
def search(arguments):

	raise NotImplementedError("Todo")

	connection = querying.connect(arguments.db, secrets=arguments.secrets, mode=arguments.mode)

	search_string = connection.regexp(".*%s.*" % arguments.string)
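
search is still a stub; the following sketch shows how the regexp value might be used once implemented, assuming query methods accept regexp-wrapped values (an assumption, not confirmed by the stub).

connection = querying.connect("frontier://FrontierProd/CMS_CONDITIONS")
search_string = connection.regexp(".*Example.*")     # placeholder pattern
matching_tags = connection.tag(name=search_string)   # assumed: query methods accept regexp values
matching_tags.as_table()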
Example #4
    def send_payloads(self, hashes, upload_session_id):
        """
		Send a list of payloads corresponding to hashes we got from the SQLite file and filtered by asking the server.
		"""
        # if we have no hashes, we can't send anything
        # but don't exit since it might mean all the Payloads were already on the server
        if len(hashes) == 0:
            self._outputter.write(
                "No hashes to send - moving to metadata upload.")
            return True
        else:
            self._outputter.write("Sending payloads of hashes not found:")
            # construct connection string for local SQLite database file
            database = ("sqlite://%s" % os.path.abspath(self.sqlite_file_name)
                        if isinstance(self.sqlite_file_name, str)
                        else self.sqlite_file_name)
            # create CondDBFW connection that maps blobs - as we need to query for payload BLOBs (disabled by default in CondDBFW)
            self._outputter.write("\tConnecting to input SQLite database.")
            con = querying.connect(database, map_blobs=True)

            # query for the Payloads
            self._outputter.write(
                "\tGetting Payloads from SQLite database based on list of hashes."
            )
            payloads = con.payload(hash=hashes)
            # if we get a single Payload back, put it in a list and turn it into a json_list
            if payloads.__class__ != data_sources.json_list:
                payloads = data_sources.json_data_node.make([payloads])

            # close the session with the SQLite database file - we won't use it again
            con.close_session()

            # if found some Payloads, send them
            if payloads:
                # Note: there is an edge case in which the SQLite file could have been modified
                # to delete the Payloads since we queried it for IOV hashes.  This may be handled in a future iteration.
                # send an HTTP POST with the data blob in the body, and everything else as URL parameters
                # convert each Payload to a dictionary - most of it can go into the URL of the HTTPS request
                dicts = payloads.as_dicts()
                self._outputter.write("Uploading Payload BLOBs:")

                # for each payload, send the BLOB to the server
                for n, payload in enumerate(dicts):
                    self._outputter.write(
                        "\t(%d/%d) Sending payload with hash '%s'." %
                        (n + 1, len(dicts), payload["hash"]))
                    response = self.send_blob(payload, upload_session_id)
                    # check response for errors
                    no_error = self.check_response_for_error_key(
                        response, exit_if_error=True)
                    if not no_error:
                        return False
                    self._outputter.write(
                        "\tPayload sent - moving to next one.")
                self._outputter.write("All Payloads uploaded.")
                return True
            else:
                return False
Example #5
File: shell.py Project: UVa-IHEP/cmg-cmssw
def connect(connection_data=None):
    if connection_data is None:
        connection_data = {
            "db_alias": "orapro",
            "schema": "cms_conditions",
            "host": "oracle",
            "secrets": "/afs/cern.ch/cms/DB/conddb/.cms_cond/netrc"
        }
    connection = querying.connect(connection_data)
    return connection
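
A usage sketch for this helper, using the querying API from the other examples; the Tag name is a placeholder.

con = connect()                      # connect with the default production Oracle settings
tag = con.tag(name="ExampleTag_v1")  # placeholder Tag name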
Example #6
def diff_of_gts(arguments):
	# get a CondDBFW Global Tag object for the first GT
	# then use the diff() method to draw the table of differences

	# set up connection
	connection = querying.connect(arguments.db, secrets=arguments.secrets, mode=arguments.mode)

	gt1 = connection.global_tag(name=arguments.gt1)
	gt2 = connection.global_tag(name=arguments.gt2)

	gt1.diff(gt2).as_table(columns=["Record", "Label", "%s Tag" % arguments.gt1, "%s Tag" % arguments.gt2])
Example #7
def diff_of_tags(arguments):
	# get a CondDBFW Tag object for the first tag
	# then use the diff() method to draw the table of differences

	# set up connection
	connection = querying.connect(arguments.db, secrets=arguments.secrets, mode=arguments.mode)

	tag1 = connection.tag(name=arguments.tag1)
	tag2 = connection.tag(name=arguments.tag2)

	tag1.diff(tag2).as_table(columns=["since", arguments.tag1, arguments.tag2])
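
An invocation sketch for the diff helpers, assuming an argparse-style namespace; the Tag names are placeholders (diff_of_gts works the same way with gt1/gt2 attributes).

import argparse

args = argparse.Namespace(
    db="frontier://FrontierProd/CMS_CONDITIONS",
    secrets=None,
    mode="r",
    tag1="ExampleTag_v1",  # placeholder Tag names to compare
    tag2="ExampleTag_v2",
)
diff_of_tags(args)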
Example #8
def connect(connection_data=None,
            mode="r",
            map_blobs=False,
            secrets=None,
            pooling=True):
    if connection_data is None:
        connection_data = "frontier://FrontierProd/CMS_CONDITIONS"
    connection = querying.connect(connection_data,
                                  mode=mode,
                                  map_blobs=map_blobs,
                                  secrets=secrets,
                                  pooling=pooling)
    # keep a reference in the module-level connections list (presumably so open connections can be managed later)
    connections.append(connection)
    return connection
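
A usage sketch for this wrapper; `connections` is the module-level list the function appends to, and the SQLite path is a placeholder.

connections = []  # module-level registry (the real module defines this elsewhere)

ro_con = connect()  # read-only connection to the default Frontier service
local_con = connect("sqlite://local.db", mode="w", map_blobs=True)  # placeholder local file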
Example #9
def list_object(arguments):

    # set up connection
    connection = querying.connect(arguments.db,
                                  secrets=arguments.secrets,
                                  mode=arguments.mode)

    options = ["tag", "gt", "gts_for_tag"]
    number_of_options_given = 0
    for option in options:
        if getattr(arguments, option):
            number_of_options_given += 1
    if number_of_options_given != 1:
        print("You must specify a single object to list.")
        exit()

    if arguments.tag:
        tag_name = arguments.tag
        tag = connection.tag(name=tag_name)
        if tag:
            iovs = tag.iovs(amount=arguments.limit)
            iovs.as_table()
        else:
            print("The Tag '%s' was not found in the database '%s'." %
                  (tag_name, arguments.db))
            exit()

    elif arguments.gt:
        gt_name = arguments.gt
        gt = connection.global_tag(name=gt_name)
        if gt:
            gt_maps = gt.tags(amount=arguments.limit)
            gt_maps.as_table(hide=["global_tag_name"])
        else:
            print("The Global Tag '%s' was not found in the database '%s'." %
                  (gt_name, arguments.db))
            exit()

    elif arguments.gts_for_tag:
        tag_name = arguments.gts_for_tag
        tag = connection.tag(name=tag_name)
        gts = tag.parent_global_tags(amount=arguments.limit)
        gts.as_table(columns=["name", "insertion_time", "snapshot_time"])
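
An invocation sketch, assuming an argparse-style namespace; exactly one of tag/gt/gts_for_tag may be set, as the option check above enforces, and the Tag name is a placeholder.

import argparse

args = argparse.Namespace(
    db="frontier://FrontierProd/CMS_CONDITIONS",
    secrets=None,
    mode="r",
    tag="ExampleTag_v1",  # list this Tag's IOVs
    gt=None,
    gts_for_tag=None,
    limit=10,
)
list_object(args)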
Example #10
	def __init__(self, metadata_source=None, debug=False, verbose=False, testing=False, server="https://cms-conddb-dev.cern.ch/cmsDbCondUpload/", **kwargs):
		"""
		Upload constructor:
		Given an SQLite file and a Metadata sources, reads into a dictionary read for it to be encoded and uploaded.

		Note: kwargs is used to capture stray arguments - arguments that do not match keywords will not be used.

		Note: default value of service_url should be changed for production.
		"""
		# set private variables
		self._debug = debug
		self._verbose = verbose
		self._testing = testing
		# initialise server-side log data as empty string - will be replaced when we get a response back from the server
		self._log_data = ""
		self._SERVICE_URL = server
		self.upload_session_id = None

		# set up client-side log file
		self.upload_log_file_name = "upload_logs/upload_log_%d" % new_log_file_id()
		self._handle = open(self.upload_log_file_name, "a")

		# set up client-side logging object
		self._outputter = output(verbose=verbose, log_handle=self._handle)
		self._outputter.write("Using server instance at '%s'." % self._SERVICE_URL)

		# expect a CondDBFW data_source object for metadata_source
		if metadata_source is None:
			# no upload metadata has been given - we cannot continue with the upload
			self.exit_upload("A source of metadata must be given so CondDBFW knows how to upload conditions.")
		else:
			# set up global metadata source variable
			self.metadata_source = metadata_source.data()

		# check for the destination tag
		# this is required whatever type of upload we're performing
		if self.metadata_source.get("destinationTags") is None:
			self.exit_upload("No destination Tag was given.")
		else:
			if isinstance(self.metadata_source.get("destinationTags"), dict) and list(self.metadata_source.get("destinationTags").keys())[0] is None:
				self.exit_upload("No destination Tag was given.")

		# make sure a destination database was given
		if self.metadata_source.get("destinationDatabase") == None:
			self.exit_upload("No destination database was given.")

		# get Conditions metadata
		if self.metadata_source.get("sourceDB") == None and self.metadata_source.get("hashToUse") == None:
			"""
			If we have neither an sqlite file nor the command line data
			"""
			self.exit_upload("You must give either an SQLite database file, or the necessary command line arguments to replace one."\
							+ "\nSee --help for command line argument information.")
		elif self.metadata_source.get("sourceDB") is not None:
			"""
			We've been given an SQLite file, so try to extract Conditions metadata based on that and the upload metadata in metadata_source.
			We now extract the Tag and IOV data from SQLite.  It is added to the dictionary for sending over HTTPS later.
			"""

			# make sure we have an input tag to look for in the source db
			self.input_tag = metadata_source.data().get("inputTag")
			if self.input_tag is None:
				self.exit_upload("No input Tag name was given.")

			# set empty dictionary to contain Tag and IOV data from SQLite
			result_dictionary = {}
			self.sqlite_file_name = self.metadata_source["sourceDB"]
			if not os.path.isfile(self.sqlite_file_name):
				self.exit_upload("SQLite file '%s' given doesn't exist." % self.sqlite_file_name)
			sqlite_con = querying.connect("sqlite://%s" % os.path.abspath(self.sqlite_file_name))

			self._outputter.write("Getting Tag and IOVs from SQLite database.")

			# query for Tag, check for existence, then convert to dictionary
			tag = sqlite_con.tag(name=self.input_tag)
			if tag is None:
				self.exit_upload("The source Tag '%s' you gave was not found in the SQLite file." % self.input_tag)
			tag = tag.as_dicts(convert_timestamps=True)

			# query for IOVs, check for existence, then convert to dictionaries
			iovs = sqlite_con.iov(tag_name=self.input_tag)
			if iovs is None:
				self.exit_upload("No IOVs found in the SQLite file given for Tag '%s'." % self.input_tag)
			iovs = iovs.as_dicts(convert_timestamps=True)
			iovs = [iovs] if not isinstance(iovs, list) else iovs

			"""
			Finally, get the list of all Payload hashes of IOVs,
			then compute the list of hashes for which there is no Payload for
			this is used later to decide if we can continue the upload if the Payload was not found on the server.
			"""
			iovs_for_hashes = sqlite_con.iov(tag_name=self.input_tag)
			if iovs_for_hashes.__class__ == data_sources.json_list:
				hashes_of_iovs = iovs_for_hashes.get_members("payload_hash").data()
			else:
				hashes_of_iovs = [iovs_for_hashes.payload_hash]
			self.hashes_with_no_local_payload = [payload_hash for payload_hash in hashes_of_iovs if sqlite_con.payload(hash=payload_hash) is None]

			# close session open on SQLite database file
			sqlite_con.close_session()

		elif metadata_source.data().get("hashToUse") is not None:
			"""
			Assume we've been given metadata on the command line (since no SQLite file was given, and we have command line arguments).
			We now use Tag and IOV data from the command line.  It is added to the dictionary for sending over HTTPS later.
			"""

			# set empty dictionary to contain Tag and IOV data from command line
			result_dictionary = {}

			now = to_timestamp(datetime.now())
			# tag dictionary will be taken from the server
			# this does not require any authentication
			tag = self.get_tag_dictionary()
			self.check_response_for_error_key(tag)
			iovs = [{"tag_name" : self.metadata_source["destinationTag"], "since" : self.metadata_source["since"], "payload_hash" : self.metadata_source["hashToUse"],\
					"insertion_time" : now}]

			# hashToUse cannot be stored locally (no sqlite file is given), so register it as not found
			self.hashes_with_no_local_payload = [self.metadata_source["hashToUse"]]

			# Note: normal optimisations will still take place - since the hash checking stage can tell if hashToUse does not exist on the server side

		# if the source Tag is run-based, convert sinces to lumi-based sinces with lumi-section = 0
		if tag["time_type"] == "Run":
			for (i, iov) in enumerate(iovs):
				iovs[i]["since"] = iovs[i]["since"] << 32

		result_dictionary = {"inputTagData" : tag, "iovs" : iovs}

		# add command line arguments to dictionary
		# remembering that metadata_source is a json_dict object
		result_dictionary.update(metadata_source.data())

		# store in instance variable
		self.data_to_send = result_dictionary

		# if the since doesn't exist, take the first since from the list of IOVs
		if result_dictionary.get("since") is None:
			result_dictionary["since"] = sorted(iovs, key=lambda iov : iov["since"])[0]["since"]
		elif self.data_to_send["inputTagData"]["time_type"] == "Run":
			# Tag time_type says IOVs use Runs for sinces, so we convert to Lumi-based for uniform processing
			self.data_to_send["since"] = self.data_to_send["since"] << 32

		"""
		TODO - Settle on a single destination tag format.
		"""
		# look for deprecated metadata entries - give warnings
		# Note - we only really support this format
		try:
			if isinstance(result_dictionary["destinationTags"], dict):
				self._outputter.write("WARNING: Multiple destination tags in a single metadata source is deprecated.")
		except Exception as e:
			self._outputter.write("ERROR: %s" % str(e))