예제 #1
0
 def __init__(self, start_year: int = 2017, parent_log: LogWithInflux = None):
     """Set up the EDGAR filing collector.

     Args:
         start_year: earliest year to download filing indexes for.
         parent_log: parent logger to attach this instance's child logger to.
     """
     self.logg = LogWithInflux(parent_log, child_name=self.__class__.__name__)
     # Set temp dir for downloading the edgar filings
     self.tmp_dir = os.path.join(tempfile.gettempdir(), 'edgar')
     # Get ticker to CIK mapping (fixed typo: 'ticket' -> 'ticker')
     self.logg.debug('Downloading ticker to CIK mapping...')
     self.t2cik_df = self.get_ticker_to_cik_map()
     # Download EDGAR indexes and retrieve the filepaths associated with them
     self.logg.debug('Downloading indexes (this may take ~2 mins)...')
     self.edgar_fpaths = self._download_indexes(start_year)
예제 #2
0
 def __init__(self,
              driver_path: str = '/usr/bin/chromedriver',
              timeout: float = 60,
              options: List[str] = None,
              headless: bool = True,
              parent_log: 'Log' = None):
     """Start a chromedriver session and wire up logging.

     Args:
         driver_path: filesystem path to the chromedriver binary.
         timeout: page-load timeout in seconds.
         options: extra chromedriver option strings, if any.
         headless: run the browser without a display when True.
         parent_log: parent logger to attach this instance's child logger to.
     """
     self.driver = ChromeDriver(driver_path, timeout, options, headless)
     # Capture the underlying service's process/port for diagnostics
     service = self.driver.service
     self.pid = service.process.pid
     self.port = service.port
     self.log = LogWithInflux(parent_log, child_name=self.__class__.__name__)
     # Shorthand aliases for element lookups
     self.elem_by_xpath = self.driver.find_element_by_xpath
     self.elems_by_xpath = self.driver.find_elements_by_xpath
     self.log.debug(f'Chromedriver started up with pid: {self.pid} receiving on port: {self.port}')
 def __init__(self, bot: str = 'sasha', parent_log: LogWithInflux = None):
     """Set up Slack tooling and per-bot channel/user attributes.

     Args:
         bot: which bot's credentials and channel map to load ('sasha' or 'viktor').
         parent_log: parent logger to attach this instance's child logger to.
     """
     creds = read_props()
     credstore = SecretStore('secretprops.kdbx', creds['secretprops_secret'])
     self.log = LogWithInflux(parent_log, child_name=self.__class__.__name__)
     self.st = SlackTools(credstore=credstore,
                          parent_log=self.log,
                          slack_cred_name=bot)
     self.bkb = BlockKitBuilder()
     if bot == 'sasha':
         # Channel attributes (Estonian names) used by the sasha bot
         channel_map = {
             'hoiatuste_kanal': 'hoiatused',
             'ilma_kanal': 'ilm',
             'kodu_kanal': 'kodu',
             'kaamerate_kanal': 'kaamerad',
             'meemide_kanal': 'meemid',
             'test_kanal': 'test',
             'koduv6rgu_kanal': 'koduvõrk',
         }
         for attr_name, channel in channel_map.items():
             setattr(self, attr_name, channel)
         self.user_me = 'U015WMFQ0DV'
         self.user_marelle = 'U016N5RJZ9C'
     elif bot == 'viktor':
         self.user_me = 'UM35HE6R5'
         self.user_marelle = 'UM3E3G72S'
예제 #4
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import time
from datetime import datetime as dtt
from kavalkilu import LogWithInflux, Keys, NetTools, Hosts, HOME_SERVER_HOSTNAME
from servertools import BrowserAction, SlackComm

# Debug mode is enabled whenever this script is NOT running on the home server
ip = NetTools().get_ip()
debug = Hosts().get_ip_from_host(HOME_SERVER_HOSTNAME) != ip
logg = LogWithInflux('vpulse_auto')

# TODO
# build out a table of when monthly, weekly things were last done.
# Handle weekly tasks either based on DOW or recorded table with date_last_done and freq columns

# Get credentials
creds = Keys().get_key('vpulse-creds')


def message_channel_and_log(msg):
    """Send `msg` to the Slack notification channel; also echo it to the log in debug mode.

    NOTE(review): `slack_comm` and `notify_channel` are module-level names not
    defined in this excerpt — confirm they are set before this is called.
    """
    slack_comm.send_message(notify_channel, msg)
    if debug:
        logg.debug(msg)


def get_vpoints():
    """Collects amount of points currently available"""
    # Pull the points element straight out of the DOM via injected JS
    points_script = "return document.getElementById('progress-bar-menu-points-total-value')"
    # `ba` is a module-level BrowserAction defined elsewhere in this script
    points = ba.driver.execute_script(points_script).get_attribute(
        'textContent').strip()
    # NOTE(review): no return statement is visible here — confirm the remainder
    # of this function (likely cut off in this excerpt) returns `points`
예제 #5
0
 def setUpClass(cls) -> None:
     """Initialize shared test fixtures: a test logger and an Amcrest camera client."""
     cls.logg = LogWithInflux('cam-test', log_to_file=False)
     # Resolve the garage camera's IP from the hosts registry
     cam_ip = Hosts().get_ip_from_host('ac-garaaz')
     cls.cam = Amcrest(cam_ip, parent_log=cls.logg)
예제 #6
0
"""Collect forecast data"""
from datetime import datetime, timedelta
from kavalkilu import LogWithInflux, InfluxDBLocal, InfluxDBHomeAuto
from servertools import OpenWeather, OWMLocation, NWSForecast, NWSForecastZone, \
    YrNoWeather, YRNOLocation

# Initiate Log, including a suffix to the log name to denote which instance of log is running
log = LogWithInflux('forecast', log_dir='weather')
influx = InfluxDBLocal(InfluxDBHomeAuto.WEATHER)
# Number of hours we're looking forward
period_h = 24
# Start & end of the lookahead
p_start = (datetime.now() + timedelta(hours=1))
p_end = (p_start + timedelta(hours=period_h))

# Pull hourly forecasts from three providers: OpenWeatherMap, NWS and Yr.no
owm_fc = OpenWeather(OWMLocation.ATX).hourly_forecast()
nws_fc = NWSForecast(NWSForecastZone.ATX).get_hourly_forecast()
yrno_fc = YrNoWeather(YRNOLocation.ATX).hourly_summary()

# Push all weather data into influx
# NOTE(review): 'own' looks like a typo for 'owm' (OpenWeatherMap) — but confirm
# whether existing influx data is already tagged 'own' before renaming
for svc, df in zip(['own', 'nws', 'yrno'], [owm_fc, nws_fc, yrno_fc]):
    log.debug(f'Collecting data from {svc.upper()}...')
    cols = ['date', 'humidity', 'temp-avg', 'feels-temp-avg']
    if svc == 'nws':
        # Replace relative-humidity with humidity
        df['humidity'] = df['relative-humidity']
    elif svc == 'yrno':
        # No humidity, feelslike included in this one
        _ = [cols.pop(cols.index(x)) for x in ['feels-temp-avg', 'humidity']]
        df['date'] = df['from']
    # Keep only the normalized column set
    df = df[cols]
예제 #7
0
import re
from typing import List
from slacktools import BlockKitBuilder as bkb
from servertools import XPathExtractor, SlackComm
from kavalkilu import LogWithInflux

logg = LogWithInflux('wotd')

# Source pages for word-of-the-day and synonym-of-the-day
wotd_url = 'https://www.dictionary.com/e/word-of-the-day/'
sotd_url = 'https://www.thesaurus.com/e/synonym-of-the-day/'


def extract_otd(url: str, is_wotd: bool = False) -> List[dict]:
    """Extract Synonym/Word of the day

    Args:
        url: the WOTD/SOTD page to scrape.
        is_wotd: when True, use word-of-the-day selectors; otherwise synonym-of-the-day.
    """
    xtool = XPathExtractor(url)
    # Get the most recent WOTD
    class_prefix = 'wotd' if is_wotd else 'sotd'
    title_section = 'word' if is_wotd else 'synonym'
    otd = xtool.xpath(f'//div[contains(@class, "{class_prefix}-items")]/div',
                      single=True)

    # The two page types mark the headword with different CSS classes
    sotd_xpath = f'.//div[contains(@class, "{class_prefix}-item__title")]'
    wotd_xpath = f'.//div[contains(@class, "otd-item-headword__word")]'
    word = xtool.xpath(wotd_xpath if is_wotd else sotd_xpath,
                       obj=otd,
                       get_text=True).strip()
    # Pronunciation
    pronunc = xtool.xpath(
        './/div[contains(@class, "otd-item-headword__pronunciation")]',
        obj=otd,
        single=True,
예제 #8
0
"""Checks slackmojis daily for new additions"""
import json
from kavalkilu import Path, LogWithInflux
from servertools import SlackComm, XPathExtractor

logg = LogWithInflux('emoji-scraper')
scom = SlackComm(bot='viktor', parent_log=logg)

# Local JSON cache of previously-seen emoji ids
p = Path()
fpath = p.easy_joiner(p.data_dir, 'slackmojis.json')
chan = 'emoji_suggestions'
url = 'https://slackmojis.com/emojis/recent'
xpath_extractor = XPathExtractor(url)

# The recent-emojis page lists each emoji as a child of this <ul>
emoji_list = xpath_extractor.xpath('//ul[@class="emojis"]', single=True)
emojis = emoji_list.getchildren()

# Read in the previous emoji id list
if not p.exists(fpath):
    prev_emojis = {}
else:
    with open(fpath) as f:
        prev_emojis = json.loads(f.read())

# Collect only emojis we haven't seen in a previous run
new_emojis = {}
for emoji in emojis:
    emo_id = emoji.getchildren()[0].get('data-emoji-id-name')
    emo_name = emoji.getchildren()[0].getchildren()[1].text.strip()
    if emo_id not in prev_emojis.keys():
        # Get link and add to the id list
        emo_link = emoji.findall('.//img')[0].get('src')
예제 #9
0
import re
import pandas as pd
from servertools import BrowserAction
from kavalkilu import LogWithInflux, InfluxDBLocal, InfluxDBTracker

logg = LogWithInflux('apt-prices', log_to_file=True)
influx = InfluxDBLocal(InfluxDBTracker.APT_PRICES)
# Headless browser session for scraping the listings page
ba = BrowserAction(headless=True, parent_log=logg)
url = 'https://www.maac.com/available-apartments/?propertyId=611831&Bedroom=2%20Bed'

ba.get(url)

# Give the page time to render before querying the DOM
ba.medium_wait()
listings = ba.get_elem(
    '//div[contains(@class, "apartment-listing")]/div[contains(@class, "apartment")]',
    single=False)
logg.debug(f'Returned {len(listings)} initial listings...')
apt_list = []
for apt in listings:
    apt_dict = {}
    # Get unit
    unit = apt.find_element_by_xpath(
        './/div[contains(@class, "apartment__unit-number")]').text
    # Clean unit of non-number chars
    unit = re.search(r'\d+', unit).group()
    # Get sqft, beds, baths (description lines are newline-separated)
    desc = apt.find_element_by_xpath(
        './/div[contains(@class, "apartment__unit-description")]').text.split(
            '\n')
    for d in desc:
        for item in ['bed', 'bath', 'sq. ft.']:
예제 #10
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from flask import Flask
from kavalkilu import LogWithInflux
from servertools import get_secret_file
from api.hosts.hosts import hosts
from api.hosts.keys import keys

app = Flask(__name__)
# Register the API blueprints (keys & hosts endpoints)
for blueprint in [keys, hosts]:
    app.register_blueprint(blueprint)


@app.route('/')
def main_page():
    """Serve the API's landing page."""
    greeting = 'Main page!'
    return greeting


@app.errorhandler(500)
def handle_errors(error):
    """Return a plain-text response for internal server errors.

    The previous body returned 'Not found!' (a 404 message) and no status code,
    which Flask would send with HTTP 200. Return the correct message and an
    explicit 500 status instead.
    """
    return 'Internal server error!', 500


if __name__ == '__main__':
    secret_key = get_secret_file('FLASK_SECRET')
    app.secret_key = secret_key
    # Instantiate log here, as the hosts API is requested to communicate with influx.
    # Moved BEFORE app.run(): run() blocks until the server shuts down, so any
    # statement placed after it is unreachable.
    log = LogWithInflux('hosts_api')
    # Port is an int; the previous string '5002' relied on implicit coercion
    app.run(host='0.0.0.0', port=5002)
예제 #11
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Method to collect temperature and other useful data from ecobee"""
from datetime import datetime, timedelta
from kavalkilu import LogWithInflux
from servertools import EcoBee

# Initiate Log, including a suffix to the log name to denote which instance of log is running
log = LogWithInflux('ecobee_temp', log_dir='weather')
eco = EcoBee()

# Collect the last 10 minutes of thermostat data
temp_now = datetime.now()
temp_10m_ago = (temp_now - timedelta(minutes=10))
# NOTE(review): `data` is unused below — presumably collect_data() also persists
# the readings as a side effect; confirm
data = eco.collect_data(temp_10m_ago, temp_now)

log.debug('Temp logging successfully completed.')

log.close()
예제 #12
0
import time
from kavalkilu import LogWithInflux, InfluxDBLocal, InfluxDBHomeAuto
from servertools import HueBulb

# Run for INTERVAL_MINS total, toggling the plug roughly every WAIT_S seconds
INTERVAL_MINS = 30
WAIT_S = 290
end_time = time.time() + INTERVAL_MINS * 60
logg = LogWithInflux('mushroom-grow-toggle')
# NOTE(review): `influx` is unused in this excerpt — see TODO below
influx = InfluxDBLocal(InfluxDBHomeAuto.TEMPS)
h = HueBulb('mushroom-plug')
# TODO: Use HASS instead of Influx to get current values

rounds = 0
while end_time > time.time():
    if rounds % 2 == 0:
        # Turn on during even rounds
        h.turn_on()
    else:
        # Turn off for off rounds
        h.turn_off()
    rounds += 1
    logg.debug(f'Waiting {WAIT_S / 60:.0f} mins...')
    time.sleep(WAIT_S)

logg.close()
예제 #13
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Detects whether the garage door is up or down"""
from kavalkilu import LogWithInflux, HAHelper
from pitools import DistanceSensor
from pitools.peripherals import PiGarage

logg = LogWithInflux('garage_door', log_dir='gdoor')

# Ultrasonic sensor pins from the garage Pi's peripheral map
TRIGGER_PIN = PiGarage.ultrasonic.trigger
ECHO_PIN = PiGarage.ultrasonic.echo
logg.debug('Initializing sensor...')
ds = DistanceSensor(TRIGGER_PIN, ECHO_PIN)

# Take an average of 10 readings
readings = []
logg.debug('Taking readings...')
for i in range(10):
    readings.append(ds.measure())

avg = sum(readings) / len(readings)

# Instantiate HASS
ha = HAHelper()
# Collect last reading
last_status = ha.get_state(PiGarage.ha_garage_door_sensor).get('state')

# Typically, reading is ca. 259cm when door is closed. ca. 50cm when open
# NOTE(review): the 6000 threshold doesn't match the cm figures above — the
# sensor presumably reports in a smaller unit (e.g. µs or mm×10); confirm
if avg < 6000:
    status = 'open'
    # TODO: Depth when car is in
예제 #14
0
 def setUpClass(cls) -> None:
     """Set up shared test fixtures: a test logger and an EdgarCollector from 2017 onward."""
     cls.log = LogWithInflux('invest-test')
     # EdgarCollector downloads indexes on construction, so this can take a while
     cls.ed = EdgarCollector(start_year=2017, parent_log=cls.log)
예제 #15
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from datetime import datetime
import pandas as pd
from kavalkilu import LogWithInflux
from servertools import SlackWeatherNotification, NWSForecast, NWSForecastZone

# Initiate Log, including a suffix to the log name to denote which instance of log is running
log = LogWithInflux('frost_warn', log_dir='weather')

now = datetime.now()
weather = NWSForecast(NWSForecastZone.ATX)
hours_df = weather.get_hourly_forecast()
hours_df['date'] = pd.to_datetime(hours_df['date'])

# Filter by column & get only the next 12 hours of forecasted weather
cols = ['date', 'temp-avg', 'feels-temp-avg', 'dewpoint', 'wind-speed']
hours_df = hours_df.loc[hours_df.date < (now + pd.Timedelta(hours=12)), cols]

# Each condition flags a freeze/frost risk profile (temps appear to be °C)
logic_dict = {
    'freeze': (hours_df['temp-avg'] < 0) & ((hours_df['dewpoint'] < -8) | (hours_df['wind-speed'] > 5)),
    'frost': (hours_df['temp-avg'] < 2) & ((hours_df['dewpoint'] < -6) | (hours_df['wind-speed'] >= 5)),
    'light frost': (hours_df['temp-avg'] < 2) & ((hours_df['dewpoint'] < -6) | (hours_df['wind-speed'] < 5)),
}

warning = None
for name, cond in logic_dict.items():
    if any(cond.tolist()):
        # We want the warnings to move from severe to relatively mild &
        # break on the first one that matches the condition
        # NOTE(review): no `break` is visible here, so the LAST matching (mildest)
        # condition wins — contradicting the comment above. Confirm whether a
        # `break` follows beyond this excerpt.
        warning = name
예제 #16
0
"""Sends latest temperature readings to HASS"""
from kavalkilu import LogWithInflux, InfluxDBHomeAuto, InfluxDBLocal, HAHelper


log = LogWithInflux('ha-temps', log_dir='weather')
influx = InfluxDBLocal(InfluxDBHomeAuto.TEMPS)

# Latest temp & humidity per location over the last 30 minutes
# NOTE(review): InfluxQL normally requires `ORDER BY time ASC`; the bare
# `ORDER BY ASC` below looks malformed — confirm this query actually parses
query = '''
    SELECT 
        last("temp") AS temp,
        last("humidity") AS humidity
    FROM "temps"
    WHERE 
        location =~ /mushroom|r6du|elutuba|wc|v2lis|freezer|fridge|kontor/
        AND time > now() - 30m
    GROUP BY 
        "location" 
    fill(null)
    ORDER BY ASC
'''
df = influx.read_query(query, time_col='time')
log.debug(f'Collected {df.shape[0]} rows of data')

log.debug('Beginning to send updates to HASS')
ha = HAHelper()
for i, row in df.iterrows():
    # HASS entity ids use underscores, not hyphens
    loc_name = row['location'].replace('-', '_')
    for sensor_type in ['temp', 'humidity']:
        dev_name = f'sensor.{loc_name}_{sensor_type}'
        log.debug(f'Updating {dev_name}...')
        ha.set_state(dev_name, data={'state': row[sensor_type]}, data_class=sensor_type)
예제 #17
0
ETL for RTL_433 json objects via syslog -> processed Dataframe -> influx

Note: depends on `rf_stream` already being running and feeding data to port 1433
    via `rtl_433 -F syslog::1433`
"""
import json
from json import JSONDecodeError
import socket
from datetime import datetime
import pandas as pd
from kavalkilu import InfluxDBLocal, InfluxDBHomeAuto, LogWithInflux, \
    GracefulKiller, Hosts, HOME_SERVER_HOSTNAME, HAHelper
from servertools import SlackComm


logg = LogWithInflux('rf_temp')
sc = SlackComm(parent_log=logg)
# Listen on the home server's address for rtl_433 syslog packets
UDP_IP = Hosts().get_ip_from_host(HOME_SERVER_HOSTNAME)
UDP_PORT = 1433

# device id to device-specific data mapping
mappings = {
    210: {'name': 'neighbor-porch'},
    3092: {'name': 'ylemine-r6du'},
    5252: {'name': 'elutuba'},
    6853: {'name': 'kontor-wc'},
    8416: {'name': 'alumine-r6du'},
    9459: {'name': 'freezer'},
    9533: {'name': 'kontor'},
    10246: {'name': 'v2lisuks'},
    12476: {'name': 'suur-wc'},
예제 #18
0
"""Message links to memes for Viktor to post periodically"""
import os
import re
import time
import numpy as np
from typing import List
from datetime import datetime, timedelta
from slacktools import SlackTools
from kavalkilu import Keys, LogWithInflux


logg = LogWithInflux('memeraker')
# Slack credentials for the viktor bot
vcreds = Keys().get_key('viktor')
st = SlackTools(**vcreds)
user_me = 'UM35HE6R5'

# Wine review text & previous stop timestamp
ddir = os.path.join(os.path.expanduser('~'), 'data')
fpath = os.path.join(ddir, 'mkov_wines.txt')
ts_path = os.path.join(ddir, 'last_memeraker_ts')


def post_memes(reviews: List[str], memes: List[str], wait_min: int = 5, wait_max: int = 60):
    """Upload each meme to the memes channel with a randomly chosen review attached.

    Sleeps a random number of seconds (between wait_min and wait_max) after each
    upload so the posts are spread out over time.
    """
    for meme_link in memes:
        # Pair the meme with a randomly drawn review as the post text
        chosen_review = np.random.choice(reviews, 1)[0]
        st.upload_file('memes-n-shitposts', meme_link, 'image', is_url=True, txt=chosen_review)
        # Wait some seconds before posting again
        pause_s = np.random.choice(range(wait_min, wait_max), 1)[0]
        logg.debug(f'Waiting {pause_s}s.')
        time.sleep(pause_s)
예제 #19
0
"""Collect current weather data"""
from kavalkilu import LogWithInflux, InfluxDBLocal, InfluxDBHomeAuto
from servertools import OpenWeather, OWMLocation

# Initiate Log, including a suffix to the log name to denote which instance of log is running
log = LogWithInflux('local', log_dir='weather')
influx = InfluxDBLocal(InfluxDBHomeAuto.WEATHER)

# Snapshot of current conditions from OpenWeatherMap
current = OpenWeather(OWMLocation.ATX).current_weather()

# Push all weather data into influx
# Drop the date column — influx supplies its own timestamp
current = current.drop('date', axis=1)
current['loc'] = 'austin'
# 'loc' is the tag column; every remaining column is written as a field
influx.write_df_to_table(current, 'loc', current.columns.tolist()[:-1])
influx.close()

log.debug('Temp logging successfully completed.')

log.close()
import tempfile
from datetime import datetime as dt, timedelta
from kavalkilu import Hosts, LogWithInflux
from easylogger import ArgParse
from servertools import SlackComm, Amcrest, VidTools

logg = LogWithInflux('motion_alerts')
sc = SlackComm(parent_log=logg)

# CLI args: camera hostname and lookback interval (minutes)
args = [{
    'names': ['-c', '--camera'],
    'other': {
        'action': 'store',
        'default': 'ac-allr6du'
    }
}, {
    'names': ['-i', '--interval'],
    'other': {
        'action': 'store',
        'default': '60'
    }
}]
ap = ArgParse(args, parse_all=False)
CAMERA = ap.arg_dict.get('camera')
INTERVAL_MINS = int(ap.arg_dict.get('interval'))
# Round down to the top of the minute for a clean search window
start_dt = (dt.now() - timedelta(minutes=INTERVAL_MINS)).replace(second=0,
                                                                 microsecond=0)
end_dt = (start_dt + timedelta(minutes=INTERVAL_MINS))

cam_ip = Hosts().get_ip_from_host(CAMERA)
cam = Amcrest(cam_ip)
예제 #21
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Read temperature and humidity from living room"""
from kavalkilu import LogWithInflux
from pitools import Sensor
from pitools.peripherals import PiElutuba


logg = LogWithInflux('elutuba_temp', log_dir='weather')

# Set the pin (BCM) for the living-room DHT22 sensor
sensor = Sensor('DHT22', data_pin=PiElutuba.dht.pin)
# Take readings & log to db
sensor.log_to_db()

logg.debug('Temp logging successfully completed.')

logg.close()
예제 #22
0
import subprocess
from kavalkilu import LogWithInflux, HOME_SERVER_HOSTNAME, Hosts

logg = LogWithInflux('rf_stream', log_dir='rf')
# Stream rtl_433 radio decodes to the home server's syslog listener on port 1433
serv_ip = Hosts().get_ip_from_host(HOME_SERVER_HOSTNAME)
cmd = ['/usr/local/bin/rtl_433', '-F', f'syslog:{serv_ip}:1433']

logg.info(f'Sending command: {" ".join(cmd)}')
# subprocess.run is the idiomatic replacement for Popen + communicate;
# rtl_433 streams until killed, so this call blocks for the process lifetime
result = subprocess.run(cmd,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT)
process_output = result.stdout
logg.debug(f'Process output: {process_output}')
예제 #23
0
import os
from datetime import datetime as dt, timedelta
from kavalkilu import LogWithInflux, DateTools, InfluxDBLocal, InfluxDBPiHole, SQLLiteLocal, Hosts
from servertools import SlackComm

logg = LogWithInflux('pihole_etl', log_to_db=True)
sc = SlackComm(parent_log=logg)
# ip -> hostname lookup for labeling DNS clients
hosts = {x['ip']: x['name'] for x in Hosts().get_all_hosts()}
datetools = DateTools()

# Pi-hole's FTL query database
FTL_DB_PATH = os.path.join('/etc', *['pihole', 'pihole-FTL.db'])
sqll = SQLLiteLocal(FTL_DB_PATH)

# Process the previous hour's queries
INTERVAL_MINS = 60
end = dt.now().astimezone().replace(second=0, microsecond=0)
start = (end - timedelta(minutes=INTERVAL_MINS))
# FTL stores timestamps as unix epoch seconds
unix_start = datetools.dt_to_unix(start, from_tz='US/Central')
unix_end = datetools.dt_to_unix(end, from_tz='US/Central')

# Map FTL status codes onto human-readable blocked/allowed labels
query = f"""
    SELECT
        client
        , domain
        , CASE
            WHEN status = 0 THEN 'UNKNOWN'
            WHEN status = 1 OR status > 3 THEN 'BLOCKED'
            WHEN status = 2 OR status = 3 THEN 'ALLOWED' 
            ELSE 'UNKNOWN'
        END AS query_status
        , CASE
            WHEN status = 0 THEN 'NO ANSWER'
예제 #24
0
"""For joining timelapse shots"""
import os
from moviepy.editor import ImageClip, concatenate_videoclips
from kavalkilu import Path, LogWithInflux
from servertools import SlackComm

log = LogWithInflux('timelapse_combi')
p = Path()
tl_dir = p.easy_joiner(p.data_dir, 'timelapse')
# Map each camera's directory name to its list of shot filenames
fnames = {}
for dirpath, _, filenames in os.walk(tl_dir):
    dirname = os.path.basename(dirpath)
    if dirname != 'timelapse':
        fnames[os.path.basename(dirpath)] = filenames

files = []
# Begin combining shots
for k, v in fnames.items():
    # Only camera directories (ac-*/re-*) hold timelapse stills
    if not any([k.startswith(x) for x in ['ac-', 're-']]):
        continue
    log.debug(f'Working on {k}. {len(v)} files.')
    full_paths = sorted([os.path.join(tl_dir, *[k, x]) for x in v])
    clips = []
    for fpath in full_paths:
        try:
            # Each still becomes a 1-second clip
            clips.append(ImageClip(fpath).set_duration(1))
        except ValueError:
            log.debug(f'Error with this path: {fpath}')
            continue
    clip = concatenate_videoclips(clips)
    # Render at 30 fps, sped up 30x
    clip = clip.set_fps(30).speedx(30)
예제 #25
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Activates nighttime mode on cameras"""
from servertools import Amcrest
from kavalkilu import LogWithInflux, Hosts

# Initiate Log, including a suffix to the log name to denote which instance of log is running
log = LogWithInflux('cam_night')
hosts = Hosts()
# Select only the garage (ga*) and living-room (el*) cameras
cam_info_list = hosts.get_hosts_and_ips(r'^ac-(ga|el)')

for cam_dict in cam_info_list:
    # Instantiate cam & arm
    cam = Amcrest(cam_dict['ip'])
    # Skip doorbell cams — presumably they don't support arming; confirm
    if cam.camera_type != 'doorbell':
        cam.arm_camera(True)
        # publish.single(f'sensors/cameras/{cam.name}/status', 'ARMED', hostname='tinyserv.local')

log.close()
예제 #26
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Read temperature and humidity from living room"""
from kavalkilu import LogWithInflux
from pitools import Sensor
from pitools.peripherals import PiGarage

logg = LogWithInflux('garage_temp', log_dir='weather')

# Dallas one-wire sensor identified by its serial number
sensor = Sensor('DALLAS', serial=PiGarage.dallas.sn)
# Take readings & log to db
sensor.measure_and_log_to_db(send_to_ha=True)

logg.debug('Temp logging successfully completed.')

logg.close()
예제 #27
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Determines if mobile is connected to local network. If not, will arm the cameras"""
from servertools import Amcrest, OpenWRT
from kavalkilu import LogWithInflux, Hosts

# Initiate Log, including a suffix to the log name to denote which instance of log is running
log = LogWithInflux('cam_active')
ow = OpenWRT()
hosts = Hosts()
# Get only cameras without numbers in the name (also excludes the doorbell)
cam_info_list = hosts.get_hosts_and_ips(r'(?!^ac-.*(\d.*|doorbell)$)^ac-.+$')

res_list = []
currently_active_ips = ow.get_active_connections()
# Check if mobile(s) are connected to LAN
for ip in [i['ip'] for i in hosts.get_hosts_and_ips('an-[bm]a.*')]:
    res_list.append(ip in currently_active_ips.keys())

# If anyone home, don't arm, otherwise arm
arm_cameras = not any(res_list)
arm_status = 'ARMED' if arm_cameras else 'UNARMED'
if not arm_cameras:
    log.debug(
        'One of the devices are currently in the network. Disabling motion detection.'
    )
else:
    log.debug(
        'None of the devices are currently in the network. Enabling motion detection.'
    )
# Apply the arm/disarm decision to every selected camera
for cam_dict in cam_info_list:
예제 #28
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Log memory, cpu use and temp of each machine"""
from kavalkilu import LogWithInflux, InfluxDBHomeAuto
from pitools import Sensor

logg = LogWithInflux('machine_data')
# Log memory, CPU use, CPU temp and disk readings for this machine
for sensor_name in ['CPU', 'MEM', 'CPUTEMP', 'DISK']:
    logg.debug(f'Logging {sensor_name}...')
    sensor = Sensor(sensor_name)
    # Take readings & log to db; getattr() is the idiomatic way to look up
    # the matching Influx table attribute by name
    sensor.measure_and_log_to_db(
        tbl=getattr(InfluxDBHomeAuto(), sensor_name), n_times=2)

# Fixed misleading completion message (was 'Temp logging...', copied from a temp script)
logg.debug('Machine data logging successfully completed.')

logg.close()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
import pandas as pd
from kavalkilu import LogWithInflux
from servertools import NWSForecast, NWSForecastZone, SlackWeatherNotification

# Initiate Log, including a suffix to the log name to denote which instance of log is running
log = LogWithInflux('significant_change', log_dir='weather')

swno = SlackWeatherNotification(parent_log=log)

now = datetime.now()
tomorrow = (now + timedelta(days=1))
weather = NWSForecast(NWSForecastZone.ATX)
hours_df = weather.get_hourly_forecast()
hours_df['date'] = pd.to_datetime(hours_df['date'])

# focus on just the following day's measurements
nextday = hours_df[hours_df['date'].dt.day == tomorrow.day].copy()
nextday['day'] = nextday['date'].dt.day
nextday['hour'] = nextday['date'].dt.hour
temp_dict = {}
# Map forecast column names to the shorter keys used downstream
metrics_to_collect = dict(
    zip(
        ['temp-avg', 'feels-temp-avg', 'precip-intensity', 'precip-prob'],
        ['temp', 'apptemp', 'precip_int', 'precip_prob'],
    ))
# Determine which hours are important to examine (e.g., for commuting / outside work)
important_hours = [0, 6, 12, 15, 18]
for hour in important_hours:
예제 #30
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import time
import requests
from grafana_api.grafana_face import GrafanaFace
from kavalkilu import Keys, LogWithInflux
from servertools import SlackComm


log = LogWithInflux('grafana_snapper')
# Grafana API credentials (the token is used for authenticated render requests)
creds = Keys().get_key('grafana')
scom = SlackComm()


def get_pic_and_upload(url, name):
    """Captures dashboard panel at URL and uploads to #notifications slack channel

    Args:
        url: fully-built Grafana render URL for the panel.
        name: filename/title to give the upload in Slack.

    Raises:
        requests.HTTPError: if Grafana returns a non-2xx response.
    """
    resp = requests.get(url, headers={'Authorization': f'Bearer {creds["token"]}'})
    # Fail fast on a bad response instead of uploading an error page as an image
    resp.raise_for_status()

    temp_file = os.path.abspath('/tmp/dash_snap.png')
    with open(temp_file, 'wb') as f:
        # Explicit chunked download of the rendered PNG
        for chunk in resp.iter_content(chunk_size=8192):
            f.write(chunk)
    scom.st.upload_file(scom.notify_channel, temp_file, name)


# The URL template to use
# Placeholders ({host}, {name}, {from}, {to}, {pid}) are presumably filled via
# str.format(**mapping) later in this script — confirm
base_url = 'http://{host}/render/{name}?orgId=1&from={from}&to={to}&panelId={pid}&width=1000&height=500'

# time range to snap