Example No. 1
def get_all():
    client = datastore.Client()
    query = client.query(kind='Account')
    data = list(query.fetch())
    for entity in data:
        entity['id'] = entity.key.id
    return data
Example No. 2
    def _collect_metric_data(self):

        # Query per-node memory usage for the last day, bucketed per day.
        client = InfluxDBClient('188.166.238.158', 32485, 'root', 'root', 'k8s')
        result = client.query('SELECT sum("value") FROM "memory/usage" WHERE "type" = \'node\' AND time > now() - 1d GROUP BY time(1d), "nodename" fill(null);')
        # Example of one returned series:
        # ('memory/usage', {'nodename': '128.199.242.5'}) - - [{'sum': 1275429384192, 'time': '2017-02-25T00:00:00Z'},
        #                                                      {'sum': 1038484692992, 'time': '2017-02-26T00:00:00Z'}]
        return result
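A minimal sketch, not part of the original, of how the ResultSet returned above could be unpacked into per-node daily totals, assuming the influxdb-python client (whose ResultSet exposes items() and get_points()):

def summarize_memory_usage(result):
    # Map each nodename tag to its list of daily memory-usage sums.
    usage_by_node = {}
    for (measurement, tags), points in result.items():
        node = tags.get('nodename') if tags else None
        # Each point is a dict like {'time': '2017-02-25T00:00:00Z', 'sum': 1275429384192};
        # skip the rows produced by fill(null).
        usage_by_node[node] = [p['sum'] for p in points if p['sum'] is not None]
    return usage_by_node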
Example No. 3
def get_all():
    client = datastore.Client()
    query = client.query(kind='Greeting')
    query.order = '-created'
    greetings = list(query.fetch())
    for entity in greetings:
        entity['id'] = entity.key.id
    return greetings
Example No. 4
def get_comments(parent_id):
    client = datastore.Client()
    ancestor = client.key('Greeting', int(parent_id))
    query = client.query(kind='Comment', ancestor=ancestor)
    entities = list(query.fetch())
    for entity in entities:
        entity['id'] = entity.key.id
    return entities
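A minimal sketch (a hypothetical helper, assuming the google-cloud-datastore library used above) of how a Comment entity would be stored under a Greeting parent key so that the ancestor query in get_comments can find it:

from google.cloud import datastore

def add_comment(parent_id, text):
    client = datastore.Client()
    # Partial key whose ancestor path is Greeting/<parent_id>; put() assigns the numeric id.
    key = client.key('Greeting', int(parent_id), 'Comment')
    comment = datastore.Entity(key=key)
    comment.update({'text': text})
    client.put(comment)
    return comment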
Example No. 5
def get_data(parent_id):
    client = datastore.Client()
    ancestor = client.key('ScrapingDataSummary', int(parent_id))
    query = client.query(kind='ScrapingDataDetails', ancestor=ancestor)
    entities = list(query.fetch())
    for entity in entities:
        entity['id'] = entity.key.id
    return entities
Example No. 6
def get_all():
    client = datastore.Client()
    query = client.query(kind='TodoList')
    query.order = '-created'
    data = list(query.fetch())
    for entity in data:
        entity['id'] = entity.key.id
    return data
Example No. 7
def get_cpu_usage(container_name):
    """Return CPU usage of container_name

    @param string container_name container name
    """
    query = "select DERIVATIVE(cpu_cumulative_usage) as cpu_usage from stats where container_name = '" + container_name + "' and time > now()-5m group by time(10s)"
    result = client.query(query)
    points = result[0]["points"]
    # cpu_cumulative_usage is reported in nanoseconds of CPU time: / 1000000000
    # converts to seconds, / 4 appears to assume a 4-core host, and * 100 turns
    # the fraction into a percentage.
    return points[0][1] / 1000000000 / 4 * 100
Example No. 8
def queryDataMax(datafield, minutes):
    maximum = 0
    results = client.query('select '+str(datafield)+' from data ORDER BY "time" DESC limit '+str(minutes))
    for p in results.get_points():
        value = p[str(datafield)]
        if maximum < value:
            maximum = value
    print(str(datafield)+" maximum over "+str(minutes)+" min: "+str(maximum))
    return maximum
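The queryData*/isField* helpers in these examples rely on a module-level client created elsewhere in their source files. A minimal sketch of how it could be set up with influxdb-python; the host, port, credentials and database name below are placeholders, not taken from the original code:

from influxdb import InfluxDBClient

# Placeholder connection details; adjust to the actual InfluxDB instance.
client = InfluxDBClient(host='localhost', port=8086,
                        username='user', password='pass',
                        database='sensordata')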
Example No. 9
def get_container_name(mesos_task_id):
	"""Return container name mapping with mesos_task_id in messos
	
	@param string mesos_task_id
	"""
	query = "select container_name from "+INFLUXDB["ts_mapping"]+" where time>now() - 5m and mesos_task_id = '" +mesos_task_id+"' limit 1" 
	result = client.query(query)
	points = result[0]["points"]
	return points[0][2]
Example No. 10
def queryDataAverage(datafield, minutes):
    total = 0
    results = client.query('select '+str(datafield)+' from data ORDER BY "time" DESC limit '+str(minutes))
    for p in results.get_points():
        value = p[str(datafield)]
        total = total + value
    average = total/minutes
    print(str(datafield)+" average over "+str(minutes)+" min: "+str(average))
    return average
Example No. 11
def get_cpu_usage(container_name):
	"""Return cpu usage of container_name

	@param string container_name container name  
	"""
	query = "select DERIVATIVE(cpu_cumulative_usage)  as cpu_usage from stats where container_name = '"+container_name+"' and time > now()-5m group by time(10s) "
	result = client.query(query)
	points = result[0]["points"]
	return points[0][1]/1000000000/4*100
Example No. 12
def isFieldAbove(datafield, minutes, threshold):
    results = client.query('select '+str(datafield)+' from data ORDER BY "time" DESC limit '+str(minutes))
    for p in results.get_points():
        value = p[str(datafield)]
        if value < threshold:
            print(str(datafield)+" has NOT been above: "+str(threshold)+" for over "+str(minutes)+" min "+str(getLastValue(str(datafield)))[:4])
            return False
    print(str(datafield)+" has been above: "+str(threshold)+" for over "+str(minutes)+" min "+str(getLastValue(str(datafield)))[:4])
    return True
Example No. 13
def getContainersName(app_name):
	query = "select DISTINCT(mesos_task_id) from mapping where time > now() - 5m"
	result = client.query(query)
	points = result[0]["points"]
	containers_name = []
	for point in points:
		mesos_task_id = point[1]
		if (mesos_task_id.find(app_name, 0) > -1):
			containers_name.append(getContainerName(point[1]))
	return containers_name
Example No. 14
def get_container_name(mesos_task_id):
    """Return container name mapping with mesos_task_id in messos
	
	@param string mesos_task_id
	"""
    query = "select container_name from " + INFLUXDB[
        "ts_mapping"] + " where time>now() - 5m and mesos_task_id = '" + mesos_task_id + "' limit 1"
    result = client.query(query)
    points = result[0]["points"]
    return points[0][2]
Example No. 15
def wolfsearch(q):
    app_id = 'Q77HTY-A9VGRYRKK6'
    client = wolframalpha.Client(app_id)
    res = client.query(q)
    if res['@success'] == 'false':
        return ('1')
    else:
        try:
            return next(res.results).text
        except:
            return ('1')
Example No. 16
def isFieldAbovePercentage(datafield, minutes, threshold):
    countAbove = 0
    #countBelow = 0
    results = client.query('select '+str(datafield)+' from data ORDER BY "time" DESC limit '+str(minutes))
    for p in results.get_points():
        value = p[str(datafield)]
        #if value < threshold:
        #    countBelow = countBelow + 1
        if value > threshold:
            countAbove = countAbove + 1
    #print(str(datafield)+" below "+str(threshold)+": "+str(countBelow/minutes)+"% of the time for the last "+str(minutes)+" min")
    #print(str(datafield)+" above "+str(threshold)+": "+str(countAbove/minutes)+"% of the time for the last "+str(minutes)+" min")
    print(str(datafield)+" has been below "+str(threshold)+": for over "+str(minutes)+" min "+str(countAbove/minutes)+"% of the time")
    return countAbove/minutes
Example No. 17
def DBcon(DevEUIlist):
    client = InfluxDBClient(host='localhost', port=8086)
    client.switch_database('op5_test')

    callBefore = "SELECT value FROM temperature, humidity, light, motion, soundAvg, soundPeak, vdd, LrrLAT, LrrLON WHERE "
    callDynamic = ""
    for item in DevEUIlist:
        print(item.get('devEUI'))
        callDynamic += "DevEUI = '" + item.get('devEUI') + "' OR "
    callDynamic = callDynamic[:-3]
    callAfter = "ORDER BY ASC LIMIT 1"
    callFinal = callBefore + callDynamic + callAfter
    print(callFinal)
    results = client.query(callFinal)
    return results.raw
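For illustration only (the DevEUI values below are made-up placeholders), a two-item DevEUIlist would make DBcon assemble and run a query like this:

# Hypothetical input; the EUIs are placeholders, not real devices.
DevEUIlist = [{'devEUI': 'AAAA000000000001'}, {'devEUI': 'AAAA000000000002'}]
# DBcon(DevEUIlist) builds callFinal as:
# SELECT value FROM temperature, humidity, light, motion, soundAvg, soundPeak, vdd, LrrLAT, LrrLON
#   WHERE DevEUI = 'AAAA000000000001' OR DevEUI = 'AAAA000000000002' ORDER BY time ASC LIMIT 1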
Example No. 18
def avg_cpu_usage(containers_name):
	"""Return avg cpu usage of all containers in list containers_name

	@param list containers_name list containers name
	@return float avg cpu usage
	"""
	number_container = len(containers_name)
	containers_name = ["'"+x+"'" for x in containers_name]
	containers_name = ",".join(containers_name)
	query = "select DERIVATIVE(cpu_cumulative_usage)  as cpu_usage,container_name from stats where  time > now()-5m and  container_name in ("+containers_name+") group by time(10s),container_name limit "+str(number_container)
	result = client.query(query)
	points = result[0]["points"]

	# Average over every container's point instead of returning after the first one.
	sum_cpu_usage = 0
	for point in points:
		sum_cpu_usage += point[1]/1000000000/4*100
	return sum_cpu_usage / number_container
Example No. 19
def avg_cpu_usage(containers_name):
    """Return avg cpu usage of all containers in list containers_name

    @param list containers_name list containers name
    @return float avg cpu usage
    """
    number_container = len(containers_name)
    containers_name = ["'" + x + "'" for x in containers_name]
    containers_name = ",".join(containers_name)
    query = "select DERIVATIVE(cpu_cumulative_usage)  as cpu_usage,container_name from stats where  time > now()-5m and  container_name in (" + containers_name + ") group by time(10s),container_name limit " + str(
        number_container)
    result = client.query(query)
    points = result[0]["points"]

    # Average over every container's point instead of returning after the first one.
    sum_cpu_usage = 0
    for point in points:
        sum_cpu_usage += point[1] / 1000000000 / 4 * 100
    return sum_cpu_usage / number_container
Example No. 20
def queryDataField(datafield, minutes):
    results = client.query('select '+str(datafield)+' from data ORDER BY "time" DESC limit '+str(minutes))
    return results
Example No. 21
def queryDataMinutes(minutes):
    results = client.query('select * from data ORDER BY "time" DESC limit '+str(minutes))
    return results
Example No. 22
def queryWaterCount(datafield, minutes):
    value = 0
    results = client.query('select sum('+str(datafield)+') from water limit '+str(minutes))
    for p in results.get_points():
        value = p["sum"]
    return value
Example No. 23
# Imports this snippet relies on (added here, inferred from the calls below).
import datetime
import json

import psutil
from influxdb import InfluxDBClient

# Establishing connection to InfluxDB.
client = InfluxDBClient(host='localhost',
                        port=8086,
                        username='******',
                        password='******',
                        database='_internal')

# The list of databases in influxDB
result = client.get_list_database()
print("Result: {0}".format(result))

client.create_database('telegraf')
client.switch_database('telegraf')

rs = client.query("SELECT * from cpu;")
cpu_points = list(rs.get_points(measurement='cpu'))

cpuu = psutil.cpu_times(percpu=True)
print(cpuu)
# Convert each namedTuple to a json string
cpuu = [json.dumps(stats._asdict()) for stats in cpuu]
print("Result: {0}".format(cpuu))
print(cpuu)

now = datetime.datetime.today()
timeinterval_min = 5  # create an event every x minutes
series = []

for i in range(0, len(cpuu)):
    past_date = now - datetime.timedelta(minutes=1 * timeinterval_min)
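    # --- Sketch continuation, not from the original snippet (which is truncated here).
    # Hypothetical: turn each per-CPU JSON string into an InfluxDB point; the
    # measurement name "cpu_times" and the "cpu" tag are placeholders.
    series.append({
        "measurement": "cpu_times",
        "tags": {"cpu": str(i)},
        "time": past_date.isoformat(),
        "fields": json.loads(cpuu[i]),
    })

# Hypothetical final step: write the collected points with influxdb-python.
client.write_points(series)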
Example No. 24
async def askTure(ctx, *, arg):

    # init client
    client = wolframalpha.Client(wolframKey)

    # send first message to user
    await ctx.channel.send(askTureMessages[random.randint(
        0,
        len(askTureMessages) - 1)])

    # query wolfram alpha
    result = client.query(arg)
    # get the main pod of data
    resultPods = result.get("pod")
    message = []
    titleAmount = 0
    images = []
    try:
        # for each pod add pod title and images from its sub pods
        for pod in resultPods:
            message.append(pod.get("@title"))
            titleAmount += 1
            subpod = pod.get("subpod")
            if (type(subpod) == list):
                for childpod in subpod:
                    response = requests.get(childpod.get("img").get("@src"))
                    img = Image.open(BytesIO(response.content))
                    images.append(img)
                    message.append(img)
            elif (type(subpod) == dict):
                response = requests.get(subpod.get("img").get("@src"))
                img = Image.open(BytesIO(response.content))
                images.append(img)
                message.append(img)

        # adjust final image height and width
        widths, heights = zip(*(i.size for i in images))
        totalHeight = sum(heights) + titleAmount * 18
        maxWidth = max(widths)
        newImage = Image.new('RGB', (maxWidth, totalHeight), color="white")
        yOffset = 0
        imageDraw = ImageDraw.Draw(newImage)
        # Create font and add titles to final images
        font = ImageFont.truetype("Verdana.ttf", 14)
        for field in message:
            if type(field) == str:
                imageDraw.text((5, yOffset), field, font=font, fill="black")
                yOffset += 18
            else:
                newImage.paste(field, (0, yOffset))
                yOffset += field.size[1]

        # create temp file name
        tempFileName = "{}.png".format(random.randint(1, 10000))
        # save temp file
        newImage.save(tempFileName, 'PNG')
        imagePath = "{}/{}".format(str(pathlib.Path().absolute()),
                                   tempFileName)
        # create embed and set file & image
        embed = discord.Embed(title="Här har du, lämna mig ifred nu",
                              color=0x00ff00)  #creates embed
        f = discord.File(imagePath, filename=tempFileName)
        embed.set_image(url="attachment://{}".format(tempFileName))

        # send image and remove temp file
        await ctx.send(file=f, embed=embed)
        os.remove(imagePath)

    except:
        await ctx.channel.send(askTureInfoNotFoundMessages[random.randint(
            0,
            len(askTureInfoNotFoundMessages) - 1)])
Example No. 25
def getLastValue(datafield):
    # Initialize so the function returns None (instead of raising NameError) when no rows match.
    value = None
    results = client.query('select '+str(datafield)+' from data ORDER BY "time" DESC limit 1')
    for p in results.get_points():
        value = p[str(datafield)]
    #print(str(datafield)+" last value: "+str(value))
    return value
Example No. 26
def getContainerName(mesos_task_id):
	query = "select container_name from mapping where time>now() - 5m and mesos_task_id = '" +mesos_task_id+"' limit 1" 
	result = client.query(query)
	points = result[0]["points"]
	return points[0][2]