Example no. 1
def get_requests_data(connection_string, FROM, TO):
	db = cpyutils.db.DB.create_from_string(connection_string)

	# Get the max and min timestamps; this also checks that we can access the database
	max_timestamp = 0
	result, row_count, rows = db.sql_query("select max(timestamp_created),min(timestamp_created) from requests")

	if result:
		max_timestamp = rows[0][0]
		min_timestamp = rows[0][1]
	else:
		raise Exception("failed to read from the database")

	# Guard against an empty table: max()/min() return NULL, which arrives here as None
	if max_timestamp is None:
		max_timestamp = 0
	if min_timestamp is None:
		min_timestamp = 0

	# Now correct the values of TO and FROM
	if TO == 0:
		TO = max_timestamp
	elif TO < 0:
		TO = max_timestamp + TO

	if FROM < 0:
		FROM = TO + FROM
	if FROM < 0:
		FROM = 0
	if FROM < min_timestamp:
		FROM = min_timestamp

	# Finally get the data from the database
	result, row_count, rows = db.sql_query("select * from requests where timestamp_created >= %d and timestamp_created <= %d order by timestamp_created" % (FROM, TO) )

	requests = []
	if result:
		# Read the data from the database and create the data structure (Stats)
		for (reqid, timestamp_created, timestamp_state, state, slots, memory, expressions, taskcount, maxtaskspernode, jobid, nodes, x) in rows:
			requests.append({
				"id": reqid,
				"t_created": timestamp_created,
				"state": state,
				"t_state": timestamp_state,
				"slots": slots,
				"memory": memory,
				"requirements": expressions,
				"taskcount": taskcount,
				"maxtaskspernode": maxtaskspernode,
				"jobid": jobid,
				"nodes": nodes
			})
		return requests, min_timestamp, max_timestamp
	else:
		return None, None, None
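A short usage sketch for get_requests_data. The connection string, database path and time window below are illustrative assumptions rather than values taken from the project; FROM/TO follow the semantics handled above (0 means "use the table bounds", negative values are offsets from the newest timestamp).

# Hypothetical call: fetch the last hour of requests (connection string is an assumption)
requests, t_min, t_max = get_requests_data("sqlite:///tmp/requests.db", FROM=-3600, TO=0)
if requests is None:
    print("could not read the requests table")
else:
    print("%d requests between %s and %s" % (len(requests), t_min, t_max))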
Example no. 2
    def get_data_from_db(self):
        if self._dbstring is None:
            return False, ""

        db = cpyutils.db.DB.create_from_string(self._dbstring)
        if db is not None:
            success, rowcount, rows = db.sql_query("select * from endpoint", True)
            if success:
                bad_eps = []
                for row in rows:
                    _id, public_ip, public_port, private_ip, private_port, timestamp = row
                    ep = Endpoint(public_ip, public_port, private_ip, private_port)
                    ep.id = _id
                    ep.timestamp = timestamp
                    _LOGGER.debug("reading endpoint %s from the database" % ep)
                    result, msg = self.apply_endpoint(ep, False)
                    if not result:
                        _LOGGER.warning("ignoring (%s)" % msg)
                        bad_eps.append(ep)

                # Cleaning bad endpoints
                for ep in bad_eps:
                    self._unsave_endpoint(ep)

                return True, "Reading %d endpoints" % rowcount
            else:
                return False, "Could not read endpoints from database"
        else:
            _LOGGER.warning("Not using a database due to errors in the connection")
            self._dbstring = None

            return False, "could not create a connection to the database"
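The method above, and the ones in the following examples, build Endpoint objects from a public/private address pair and then set id and timestamp on them. Below is a minimal sketch of such a class, inferred from that usage only; the project's real Endpoint class may carry more state and validation.

import time

class Endpoint:
    # Minimal, assumed shape: only the attributes that the database code touches
    def __init__(self, public_ip, public_port, private_ip, private_port):
        self.id = None
        self.public_ip = public_ip
        self.public_port = public_port
        self.private_ip = private_ip
        self.private_port = private_port
        self.timestamp = time.time()

    def __str__(self):
        return "%s:%d -> %s:%d" % (self.public_ip, self.public_port, self.private_ip, self.private_port)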
Example no. 3
    def _initialize_db(self):
        if (self._dbstring == "") or (self._dbstring is None):
            _LOGGER.info("not using a database (no db connection string provided)")
            return False, "not using a database (no db connection string provided)"
        db = cpyutils.db.DB.create_from_string(self._dbstring)
        if db is not None:
            success, _, _ = db.sql_query(
                "create table if not exists endpoint (id, public_ip, public_port, private_ip, private_port, timestamp)",
                True,
            )
            if not success:
                _LOGGER.error("could not initialize the database")
                return False, "could not initialize the database"
            return True, ""
        else:
            _LOGGER.warning("Not using a database due to errors in the connection")
            self._dbstring = None

            return False, "could not create a connection to the database"
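A small sketch of how the (success, message) pair returned by _initialize_db might be consumed at start-up; the start() method and its log text are assumptions, not part of the original code.

    def start(self):
        # Hypothetical caller: try to set up the endpoint table and fall back
        # to purely in-memory state when the database is not usable
        ok, msg = self._initialize_db()
        if not ok:
            _LOGGER.warning("endpoint persistence disabled: %s" % msg)
        return ok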
Example no. 4
    def _unsave_endpoint(self, ep):
        if self._dbstring is None:
            return False, ""

        db = cpyutils.db.DB.create_from_string(self._dbstring)
        if db is not None:
            # Build the statement, log it for debugging purposes and then execute it
            sql = (
                'delete from endpoint where public_ip = "%s" and public_port = %d and private_ip = "%s" and private_port = %d'
                % (ep.public_ip, ep.public_port, ep.private_ip, ep.private_port)
            )
            _LOGGER.debug(sql)
            success, rowcount, rows = db.sql_query(sql, True)
            if success:
                return True, "Endpoint removed from the database (%s)" % ep
            else:
                return False, "Could not delete endpoint from the database"
        else:
            _LOGGER.warning("could not create a connection to the database")
            return False, "could not create a connection to the database"
Example no. 5
    def _save_endpoint(self, ep):
        if self._dbstring is None:
            return False, ""

        db = cpyutils.db.DB.create_from_string(self._dbstring)
        if db is not None:
            # Build the statement, log it for debugging purposes and then execute it
            sql = (
                'insert into endpoint values ("%s", "%s", %d, "%s", %d, %d)'
                % (ep.id, ep.public_ip, ep.public_port, ep.private_ip, ep.private_port, ep.timestamp)
            )
            _LOGGER.debug(sql)
            success, rowcount, rows = db.sql_query(sql, True)
            if success:
                return True, "Endpoint saved (%s)" % ep
            else:
                return False, "Could not save endpoint to the database"
        else:
            _LOGGER.warning("could not create a connection to the database")
            return False, "could not create a connection to the database"
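A hypothetical round trip that exercises _save_endpoint and _unsave_endpoint together, using the Endpoint sketch shown after Example no. 2; "manager" stands for an instance of the class defining these methods, and the addresses, ports and id are made-up values.

# Hypothetical round trip: persist an endpoint, then remove it again
ep = Endpoint("158.42.1.1", 8080, "10.0.0.5", 80)
ep.id = "ep-0001"
saved, msg = manager._save_endpoint(ep)
if saved:
    removed, msg = manager._unsave_endpoint(ep)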
Example no. 6
def get_reports_data(connection_string, FROM, TO):
	db = cpyutils.db.DB.create_from_string(connection_string)

	# Get the max and min timestamps; this also checks that we can access the database
	max_timestamp = 0
	result, row_count, rows = db.sql_query("select max(timestamp),min(timestamp) from host_monitoring")

	if result:
		max_timestamp = rows[0][0]
		min_timestamp = rows[0][1]
	else:
		raise Exception("failed to read from the database")

	# Guard against an empty table: max()/min() return NULL, which arrives here as None
	if max_timestamp is None:
		max_timestamp = 0
	if min_timestamp is None:
		min_timestamp = 0

	# Now correct the values of TO and FROM
	if TO == 0:
		TO = max_timestamp
	elif TO < 0:
		TO = max_timestamp + TO

	if FROM < 0:
		FROM = TO + FROM
	if FROM < 0:
		FROM = 0

	# Finally get the data from the database
	result, row_count, rows = db.sql_query("select * from host_monitoring where timestamp_state >= %d and timestamp_state <= %d order by timestamp_state" % (FROM, TO) )
	timeline = {}
	hostnames = []

	if result:
		# Read the data from the database and create the data structure (Stats)
		for (name, timestamp_state, slots_count, slots_free, memory_total, memory_free, state, timestamp, x) in rows:
			# The timeline is keyed by the integer monitoring timestamp (this overrides
			# the timestamp_state value unpacked from the row)
			timestamp_state = int(timestamp)
			s = Stats(slots_count, slots_free, memory_total, memory_free, state, timestamp)
			if timestamp_state not in timeline.keys():
				timeline[timestamp_state] = {}
			if name not in timeline[timestamp_state].keys():
				timeline[timestamp_state][name] = s
			if name not in hostnames:
				hostnames.append(name)

		# Get the timestamps sorted
		timesteps = sorted(timeline.keys())

		# Now we are filling the data blanks for each host (to get data for every host at each timestamp)
		fill_the_blanks = True
		if fill_the_blanks:
			current_values = {}
			for nname in hostnames:
				current_values[nname] = Stats(0, 0, 0, 0, 2, 0)

			for t in timesteps:
				for nname in hostnames:
					if nname not in timeline[t].keys():
						timeline[t][nname] = current_values[nname].clone(t)
					current_values[nname] = timeline[t][nname]

		# Now we are re-organizing the data, indexing by host
		hostdata = {}
		for hostname in hostnames:
			hostdata[hostname] = []

		for t in timesteps:
			for hostname in hostnames:
				if hostname in timeline[t]:
					t_s = "%d" % t
					hostdata[hostname].append(timeline[t][hostname].toJSONobj())

		return hostdata, min_timestamp, max_timestamp
	else:
		return None, None, None
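A matching usage sketch for get_reports_data; as before, the connection string and the time window are illustrative assumptions.

# Hypothetical call: monitoring data for the last hour, then a per-host sample count
hostdata, t_min, t_max = get_reports_data("sqlite:///tmp/monitoring.db", FROM=-3600, TO=0)
if hostdata is not None:
    for hostname, samples in hostdata.items():
        print("%s: %d samples between %s and %s" % (hostname, len(samples), t_min, t_max))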