#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Read temperature and humidity from living room"""
from kavalkilu import LogWithInflux
from pitools import Sensor
from pitools.peripherals import PiElutuba

logg = LogWithInflux('elutuba_temp', log_dir='weather')

# Set the pin (BCM)
sensor = Sensor('DHT22', data_pin=PiElutuba.dht.pin)
# Take readings & log to db
sensor.log_to_db()

logg.debug('Temp logging successfully completed.')
logg.close()
import re

import pandas as pd
from servertools import BrowserAction
from kavalkilu import LogWithInflux, InfluxDBLocal, InfluxDBTracker

logg = LogWithInflux('apt-prices', log_to_file=True)
influx = InfluxDBLocal(InfluxDBTracker.APT_PRICES)
ba = BrowserAction(headless=True, parent_log=logg)
url = 'https://www.maac.com/available-apartments/?propertyId=611831&Bedroom=2%20Bed'
ba.get(url)
ba.medium_wait()

listings = ba.get_elem(
    '//div[contains(@class, "apartment-listing")]/div[contains(@class, "apartment")]',
    single=False)
logg.debug(f'Returned {len(listings)} initial listings...')

apt_list = []
for apt in listings:
    apt_dict = {}
    # Get unit
    unit = apt.find_element_by_xpath(
        './/div[contains(@class, "apartment__unit-number")]').text
    # Clean unit of non-number chars
    unit = re.search(r'\d+', unit).group()
    # Get sqft, beds, baths
    desc = apt.find_element_by_xpath(
        './/div[contains(@class, "apartment__unit-description")]').text.split('\n')
    for d in desc:
        for item in ['bed', 'bath', 'sq. ft.']:
            if item in d.lower():
    - Click `Add Custom Emoji`
    - Upload file & give it a unique name!
"""
if len(new_emojis) > 0:
    blocks = [
        scom.bkb.make_context_section('New Emojis :postal_horn::postal_horn::postal_horn:')
    ]
    for name, e_dict in new_emojis.items():
        blocks.append(
            scom.bkb.make_block_section(
                f'<{e_dict["link"]}|{e_dict["name"].replace(":", "")}>',
                accessory=scom.bkb.make_image_accessory(e_dict['link'], 'emoji')))
    # Iterate through blocks in chunks; Slack limits posts to 50 blocks each
    for i in range(0, len(blocks), 50):
        logg.debug(f'Sending message chunk starting at block {i + 1}')
        scom.st.send_message(channel=chan, message='Emoji Report!',
                             blocks=blocks[i:i + 50])
    prev_emojis.update(new_emojis)
else:
    logg.debug('No new emojis to send.')

# Save data to path
with open(fpath, 'w') as f:
    json.dump(prev_emojis, f)
logg.close()
log = LogWithInflux('ha-temps', log_dir='weather')
influx = InfluxDBLocal(InfluxDBHomeAuto.TEMPS)

query = '''
    SELECT
        last("temp") AS temp,
        last("humidity") AS humidity
    FROM "temps"
    WHERE
        location =~ /mushroom|r6du|elutuba|wc|v2lis|freezer|fridge|kontor/
        AND time > now() - 30m
    GROUP BY "location" fill(null)
    ORDER BY time ASC
'''

df = influx.read_query(query, time_col='time')
log.debug(f'Collected {df.shape[0]} rows of data')

log.debug('Beginning to send updates to HASS')
ha = HAHelper()
for i, row in df.iterrows():
    loc_name = row['location'].replace('-', '_')
    for sensor_type in ['temp', 'humidity']:
        dev_name = f'sensor.{loc_name}_{sensor_type}'
        log.debug(f'Updating {dev_name}...')
        ha.set_state(dev_name, data={'state': row[sensor_type]}, data_class=sensor_type)

log.debug('Update completed.')
log.close()
    'temperature_C': 'temp',
    'humidity': 'humidity'
}


def parse_syslog(ln: bytes) -> str:
    """Try to extract the payload from a syslog line."""
    ln = ln.decode("ascii")  # also UTF-8 if BOM
    if ln.startswith("<"):
        # fields should be "<PRI>VER", timestamp, hostname, command, pid, mid, sdata, payload
        fields = ln.split(None, 7)
        ln = fields[-1]
    return ln


logg.debug('Establishing socket...')
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.bind((UDP_IP, UDP_PORT))

logg.debug('Connecting to Influx..')
influx = InfluxDBLocal(InfluxDBHomeAuto.TEMPS)
killer = GracefulKiller()

# Set up methods to periodically send processed data packets to Influx
interval = datetime.now()
# Adjust interval to be an even 10 mins
replace_mins = interval.minute - interval.minute % 10
interval = interval.replace(minute=replace_mins, second=0, microsecond=0)
split_s = 300  # Interval between data packets sent to Influx
logg.debug(f'Data packets sent to Influx every {split_s / 60} mins.')
data_df = pd.DataFrame()
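# The excerpt above ends before the receive loop. What follows is a minimal
# sketch of such a loop, not the original implementation: the mapping dict
# excerpted at the top is assumed to be named COLUMN_MAP, GracefulKiller is
# assumed to expose a `kill_now` flag, and the flush call mirrors the
# write_df_to_table(df, tags=..., value_cols=..., time_col=...) signature used
# elsewhere in this collection (the tag/value column split is also an assumption).
import json
from datetime import timedelta

while not killer.kill_now:
    raw, _ = sock.recvfrom(1024)
    try:
        packet = json.loads(parse_syslog(raw))
    except (json.JSONDecodeError, UnicodeDecodeError):
        continue  # not every syslog line carries a JSON payload
    # Keep only the measurements we track, renamed to our column names
    row = {new: packet[old] for old, new in COLUMN_MAP.items() if old in packet}
    row['timestamp'] = datetime.now()
    data_df = data_df.append(row, ignore_index=True)
    if datetime.now() >= interval + timedelta(seconds=split_s) and not data_df.empty:
        influx.write_df_to_table(data_df, tags=['location'],
                                 value_cols=['temp', 'humidity'],
                                 time_col='timestamp')
        data_df = pd.DataFrame()
        interval = datetime.now()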
# Initiate Log, including a suffix to the log name to denote which instance of log is running
log = LogWithInflux('forecast', log_dir='weather')
influx = InfluxDBLocal(InfluxDBHomeAuto.WEATHER)

# Number of hours we're looking forward
period_h = 24
# Start & end of the lookahead
p_start = (datetime.now() + timedelta(hours=1))
p_end = (p_start + timedelta(hours=period_h))

owm_fc = OpenWeather(OWMLocation.ATX).hourly_forecast()
nws_fc = NWSForecast(NWSForecastZone.ATX).get_hourly_forecast()
yrno_fc = YrNoWeather(YRNOLocation.ATX).hourly_summary()

# Push all weather data into influx
for svc, df in zip(['owm', 'nws', 'yrno'], [owm_fc, nws_fc, yrno_fc]):
    log.debug(f'Collecting data from {svc.upper()}...')
    cols = ['date', 'humidity', 'temp-avg', 'feels-temp-avg']
    if svc == 'nws':
        # Replace relative-humidity with humidity
        df['humidity'] = df['relative-humidity']
    elif svc == 'yrno':
        # Neither humidity nor feels-like is included in this one
        _ = [cols.pop(cols.index(x)) for x in ['feels-temp-avg', 'humidity']]
        df['date'] = df['from']
    df = df[cols]
    df = df.rename(columns={x: f'fc-{svc}-{x}' if x != 'date' else x for x in df.columns})
    df = df[(df['date'] >= p_start.strftime('%F %H:00:00')) &
            (df['date'] <= p_end.strftime('%F %H:00:00'))]
    df['loc'] = 'austin'
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Performs a speedtest assessment"""
from speedtest import Speedtest
from datetime import datetime as dt
import pandas as pd
from kavalkilu import LogWithInflux, InfluxDBLocal, InfluxDBHomeAuto

logg = LogWithInflux('speedtest')
influx = InfluxDBLocal(InfluxDBHomeAuto.NETSPEED)

# Prep speedtest by getting nearby servers
logg.debug('Instantiating speedtest object.')
speed = Speedtest()
servers = speed.get_servers([])
best_server = speed.get_best_server()
logg.debug('Server selected. Beginning speed test.')

# Convert bits/s to Mbps
down = speed.download() / 1000000
up = speed.upload() / 1000000
ping = best_server.get('latency')
# Guard the format spec: ping may be None if the server didn't report latency
ping_str = f'{ping:.1f}' if ping is not None else 'n/a'
logg.debug(f'Returned down: {down:.1f} up: {up:.1f} ping: {ping_str}')

# Put the results into a pandas dataframe
test = pd.DataFrame({
    'test_date': dt.now(),
    'download': down,
    'upload': up,
    'ping': ping
}, index=[0])
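# The excerpt stops after building the dataframe; presumably the next step
# writes it to the NETSPEED table. A sketch using the write_df_to_table call
# seen elsewhere in this collection -- the tag/value column split here is an
# assumption, not the original code.
influx.write_df_to_table(test, tags=[], value_cols=['download', 'upload', 'ping'],
                         time_col='test_date')
logg.debug('Speedtest results logged.')
logg.close()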
# Check whether the time range falls entirely in one of the ranges that were already downloaded
if not any([all([x['start'] < start < x['end'], end < x['end']])
            for x in already_downloaded]):
    # Trim whichever end of the range is already covered
    clip_st = start
    clip_end = end
    for dl_dict in already_downloaded:
        if dl_dict['start'] < start < dl_dict['end']:
            # Start of the range is already covered; begin just after the downloaded chunk
            clip_st = (dl_dict['end'] + timedelta(seconds=1))
        if dl_dict['start'] < end < dl_dict['end']:
            # End of the range is already covered; stop just before the downloaded chunk
            clip_end = (dl_dict['start'] - timedelta(seconds=1))
    logg.debug(f'Downloading file(s) covering {clip_st:%T} to {clip_end:%T}..')
    already_downloaded += cam.download_files_from_range(clip_st, clip_end, temp_dir)

filepaths = list(sorted(set([
    x['path'] for x in already_downloaded
    if x['start'] < start < x['end'] or x['start'] < end < x['end']
])))
# Clip & combine the video files, save to temp file
logg.debug('Clipping video files and combining them...')
fpath = vt.make_clip_from_filenames(start, end, filepaths, trim_files=True,
import subprocess

from kavalkilu import LogWithInflux, HOME_SERVER_HOSTNAME, Hosts

logg = LogWithInflux('rf_stream', log_dir='rf')
serv_ip = Hosts().get_ip_from_host(HOME_SERVER_HOSTNAME)

# Stream rtl_433 decodes to the home server over syslog
cmd = ['/usr/local/bin/rtl_433', '-F', f'syslog:{serv_ip}:1433']
logg.info(f'Sending command: {" ".join(cmd)}')
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
process_output, _ = process.communicate()
logg.debug(f'Process output: {process_output}')
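# Note: communicate() blocks until rtl_433 exits, so the debug line above only
# fires on shutdown. If incremental output were wanted, one alternative (a
# sketch, not the original design) is to stream stdout line by line:
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                      text=True) as proc:
    for line in proc.stdout:
        logg.debug(f'rtl_433: {line.rstrip()}')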
import time
from random import randint
from typing import Any, Callable, List, Optional, Union

from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.remote.webelement import WebElement

from kavalkilu import LogWithInflux
# ChromeDriver is this package's own wrapper around selenium's Chrome
# webdriver (its import path isn't shown in this excerpt)


class BrowserAction:
    """
    Performs actions on a Selenium-type webdriver

    Args for __init__:
        driver_path: str, path to the chromedriver executable
    """
    # Predefined wait ranges, in seconds
    _slow_wait = [15, 30]
    _medium_wait = [5, 15]
    _fast_wait = [1, 8]
    REST_S = 2          # Standard seconds to rest between attempts
    STD_ATTEMPTS = 3    # Standard attempts to make before failing

    def __init__(self, driver_path: str = '/usr/bin/chromedriver',
                 timeout: float = 60, options: List[str] = None,
                 headless: bool = True, parent_log: 'Log' = None):
        self.driver = ChromeDriver(driver_path, timeout, options, headless)
        self.pid = self.driver.service.process.pid
        self.port = self.driver.service.port
        self.log = LogWithInflux(parent_log, child_name=self.__class__.__name__)
        self.elem_by_xpath = self.driver.find_element_by_xpath
        self.elems_by_xpath = self.driver.find_elements_by_xpath
        self.log.debug(f'Chromedriver started up with pid: {self.pid} '
                       f'receiving on port: {self.port}')

    def _do_attempts(self, func: Callable, *args, sub_method: str = None,
                     sub_method_input: str = None, attempts: int = 3,
                     rest_s: float = 2) -> Optional[Any]:
        """Attempts a certain method n times before gracefully failing"""
        for i in range(0, attempts):
            if i > 0:
                # Sleep between attempts, but let a one-off attempt through without much delay
                time.sleep(rest_s)
            try:
                thing = func(*args)
                if sub_method is not None:
                    if sub_method_input is None:
                        getattr(thing, sub_method)()
                        break
                    else:
                        getattr(thing, sub_method)(sub_method_input)
                        break
                else:
                    return thing
            except Exception as e:
                self.log.error(f'Attempt {i + 1}: fail -- {e}')

    def tear_down(self):
        """Make sure the browser is closed on cleanup"""
        self.log.info('Shutting down browser.')
        self.driver.quit()

    def get(self, url: str):
        """
        Navigates browser to url
        Args:
            url: str, url to navigate to
        """
        self.log.debug(f'Loading url: {url}')
        self.driver.get(url)

    def click(self, xpath: str, attempts: int = STD_ATTEMPTS,
              rest_s: float = REST_S):
        """
        Clicks HTML element
        Args:
            xpath: str, xpath to element to click
            attempts: int, number of attempts to make before failing
            rest_s: float, number of seconds to rest between attempts
        """
        self._do_attempts(self.elem_by_xpath, xpath, sub_method='click',
                          attempts=attempts, rest_s=rest_s)

    def clear(self, xpath: str, attempts: int = STD_ATTEMPTS,
              rest_s: float = REST_S):
        """
        Clears form element of text
        Args:
            xpath: str, xpath to form element
            attempts: int, number of attempts to make before failing
            rest_s: float, number of seconds to rest between attempts
        """
        self._do_attempts(self.elem_by_xpath, xpath, sub_method='clear',
                          attempts=attempts, rest_s=rest_s)

    def enter(self, xpath: str, entry_text: str, attempts: int = STD_ATTEMPTS,
              rest_s: float = REST_S):
        """
        Enters text into form element
        Args:
            xpath: str, xpath to form
            entry_text: str, text to enter into form
            attempts: int, number of attempts to make before failing
            rest_s: float, number of seconds to rest between attempts
        """
        self._do_attempts(self.elem_by_xpath, xpath, sub_method='send_keys',
                          sub_method_input=entry_text, attempts=attempts,
                          rest_s=rest_s)

    def elem_exists(self, xpath: str, attempts: int = 1,
                    rest_s: float = REST_S) -> bool:
        """
        Determines if a particular element exists
        Args:
            xpath: str, xpath to HTML element
            attempts: int, number of attempts to make before failing
            rest_s: float, number of seconds to rest between attempts
        Returns: True if exists
        """
        elem = self._do_attempts(self.elem_by_xpath, xpath, attempts=attempts,
                                 rest_s=rest_s)
        if elem is not None:
            return True
        return False

    def get_elem(self, xpath: str, single: bool = True,
                 attempts: int = STD_ATTEMPTS, rest_s: float = REST_S) -> \
            Union[WebElement, List[WebElement]]:
        """
        Returns HTML elements as selenium objects
        Args:
            xpath: str, xpath of element to return
            single: boolean, True if returning only one element. default: True
            attempts: int, number of attempts to make before failing
            rest_s: float, number of seconds to rest between attempts
        Returns: HTML element(s) matching xpath text
        """
        if single:
            return self._do_attempts(self.elem_by_xpath, xpath,
                                     attempts=attempts, rest_s=rest_s)
        else:
            return self._do_attempts(self.elems_by_xpath, xpath,
                                     attempts=attempts, rest_s=rest_s)

    def get_text(self, xpath: str, single: bool = True,
                 attempts: int = STD_ATTEMPTS, rest_s: float = REST_S) -> \
            Union[str, List[str]]:
        """
        Returns text in element(s)
        Args:
            xpath: str, xpath to element
            single: boolean, whether to extract from a single element or multiple. default: True
            attempts: int, number of attempts to make before failing
            rest_s: float, number of seconds to rest between attempts
        Returns: Text from inside element(s)
        """
        if single:
            elem = self._do_attempts(self.elem_by_xpath, xpath,
                                     attempts=attempts, rest_s=rest_s)
            if elem is not None:
                return elem.text
        else:
            elems = self._do_attempts(self.elems_by_xpath, xpath,
                                      attempts=attempts, rest_s=rest_s)
            text_list = []
            # Guard against all attempts failing (elems would be None)
            for e in elems or []:
                if e is not None:
                    text_list.append(e.text)
            return text_list

    def remove(self, xpath: str, single: bool = True,
               attempts: int = STD_ATTEMPTS, rest_s: float = REST_S):
        """
        Uses JavaScript commands to remove desired element
        Args:
            xpath: str, xpath to element
            single: boolean, whether to apply to a single element or multiple. default: True
            attempts: int, number of attempts to make before failing
            rest_s: float, number of seconds to rest between attempts
        """
        script = """
        var element = arguments[0];
        element.remove();
        """
        if single:
            elem = self._do_attempts(self.elem_by_xpath, xpath,
                                     attempts=attempts, rest_s=rest_s)
            if elem is not None:
                self.driver.execute_script(script, elem)
        else:
            elems = self._do_attempts(self.elems_by_xpath, xpath,
                                      attempts=attempts, rest_s=rest_s)
            # Guard against all attempts failing (elems would be None)
            for e in elems or []:
                if e is not None:
                    self.driver.execute_script(script, e)

    def add_style_to_elem(self, elem: WebElement, css_str: str):
        """Injects CSS into elem style"""
        js = f"arguments[0].setAttribute('style', '{css_str}');"
        self.driver.execute_script(js, elem)

    def click_by_id(self, elem_id: str):
        """Clicks an element via JavaScript, targeted by its id"""
        js = f'document.getElementById("{elem_id}").click();'
        self.driver.execute_script(js)

    def rand_wait(self, sleep_range_secs: List[int]):
        """
        Determines sleep time as a random number between upper and lower limit,
        then sleeps for that given time. After the sleep, scrolls randomly
        vertically and horizontally on the page four times.
        Args:
            sleep_range_secs: list, min and max number of seconds to sleep
        """
        sleep_range_secs = sorted(sleep_range_secs)
        if len(sleep_range_secs) == 2:
            sleep_secs_lower, sleep_secs_higher = sleep_range_secs
        elif len(sleep_range_secs) > 2:
            sleep_secs_lower, sleep_secs_higher = sleep_range_secs[0], sleep_range_secs[-1]
        else:
            raise ValueError('Input for sleep range must be at least two items')
        sleeptime = randint(sleep_secs_lower, sleep_secs_higher)
        self.log.debug(f'Waiting {sleeptime}s')
        time.sleep(sleeptime)
        # After the wait period, scroll through the window randomly
        for i in range(4):
            r_x = randint(-20, 20)
            r_y = randint(150, 300)
            self.scroll_absolute(direction=f'{r_x},{r_y}')

    def fast_wait(self):
        self.rand_wait(self._fast_wait)

    def medium_wait(self):
        self.rand_wait(self._medium_wait)

    def slow_wait(self):
        self.rand_wait(self._slow_wait)

    def scroll_to_element(self, elem: WebElement,
                          use_selenium_method: bool = True):
        """
        Scrolls to get element in view
        Args:
            elem: Selenium-class element
            use_selenium_method: bool, if True, uses built-in Selenium method of scrolling
                an element to view; otherwise, uses Javascript (scrollIntoView)
        """
        if use_selenium_method:
            actions = ActionChains(self.driver)
            actions.move_to_element(elem).perform()
        else:
            scroll_center_script = """
                var viewPortHeight = Math.max(
                    document.documentElement.clientHeight, window.innerHeight || 0);
                var elementTop = arguments[0].getBoundingClientRect().top;
                window.scrollBy(0, elementTop-(viewPortHeight/2));
            """
            self.driver.execute_script(scroll_center_script, elem)

    def scroll_absolute(self, direction: str = 'up'):
        """Scrolls all the way up/down or to specific x,y coordinates"""
        if direction == 'up':
            coords = '0, 0'
        elif direction == 'down':
            coords = '0, document.body.scrollHeight'
        else:
            if ',' in direction:
                coords = direction
            else:
                raise ValueError(
                    'Invalid parameters entered. Must be an x,y coordinate, or up/down command.')
        self.driver.execute_script(f'window.scrollTo({coords});')
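# A minimal usage sketch of the class above; the URL and xpath are
# placeholders, not taken from the original code.
if __name__ == '__main__':
    ba = BrowserAction(headless=True)
    ba.get('https://example.com')
    ba.medium_wait()
    if ba.elem_exists('//h1'):
        print(ba.get_text('//h1'))
    ba.tear_down()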
def post_memes(reviews: List[str], memes: List[str], wait_min: int = 5,
               wait_max: int = 60):
    # Begin the upload process; include a review with each meme
    for meme in memes:
        review = np.random.choice(reviews, 1)[0]
        st.upload_file('memes-n-shitposts', meme, 'image', is_url=True, txt=review)
        # Wait some seconds before posting again
        wait_s = np.random.choice(range(wait_min, wait_max), 1)[0]
        logg.debug(f'Waiting {wait_s}s.')
        time.sleep(wait_s)


# Read in the last timestamp. If nothing, default to one hour ago
if os.path.exists(ts_path):
    with open(ts_path) as f:
        last_timestamp = datetime.fromtimestamp(float(f.read().strip()))
    logg.debug(f'Found timestamp of {last_timestamp}')
else:
    last_timestamp = (datetime.now() - timedelta(minutes=60))
    logg.debug(f'No pre-existing timestamp found. Setting as {last_timestamp}')

# Scan the channel for messages posted since the last timestamp
msgs = st.search_messages_by_date(channel='memeraker', after_date=last_timestamp,
                                  after_ts=last_timestamp, max_results=100)
# Search results are ordered from the most recent, so set the most recent one
# as the timestamp to look from at the next instantiation of this script
if len(msgs) > 0:
    logg.debug('Rewriting timestamp file with latest result\'s timestamp')
    # Rewrite the timestamp file
    with open(ts_path, 'w') as f:
        f.write(msgs[0]['ts'])
import time

from kavalkilu import LogWithInflux, InfluxDBLocal, InfluxDBHomeAuto
from servertools import HueBulb

INTERVAL_MINS = 30
WAIT_S = 290
end_time = time.time() + INTERVAL_MINS * 60

logg = LogWithInflux('mushroom-grow-toggle')
influx = InfluxDBLocal(InfluxDBHomeAuto.TEMPS)
h = HueBulb('mushroom-plug')

# TODO: Use HASS instead of Influx to get current values

rounds = 0
while end_time > time.time():
    if rounds % 2 == 0:
        # Turn on during even rounds
        h.turn_on()
    else:
        # Turn off during odd rounds
        h.turn_off()
    rounds += 1
    logg.debug(f'Waiting {WAIT_S / 60:.0f} mins...')
    time.sleep(WAIT_S)

logg.close()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Detects whether the garage door is up or down"""
from kavalkilu import LogWithInflux, HAHelper
from pitools import DistanceSensor
from pitools.peripherals import PiGarage

logg = LogWithInflux('garage_door', log_dir='gdoor')

TRIGGER_PIN = PiGarage.ultrasonic.trigger
ECHO_PIN = PiGarage.ultrasonic.echo

logg.debug('Initializing sensor...')
ds = DistanceSensor(TRIGGER_PIN, ECHO_PIN)

# Take an average of 10 readings
readings = []
logg.debug('Taking readings...')
for i in range(10):
    readings.append(ds.measure())
avg = sum(readings) / len(readings)

# Instantiate HASS
ha = HAHelper()
# Collect last reading
last_status = ha.get_state(PiGarage.ha_garage_door_sensor).get('state')

# Typically, the reading is ca. 259cm when the door is closed, ca. 50cm when open
if avg < 6000:
    status = 'open'
    # TODO: Depth when car is in
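# The excerpt ends at the TODO above. A sketch of how it might continue --
# the 'closed' branch and the HASS update are assumptions modeled on the
# HAHelper usage seen elsewhere in this collection, not the original code.
else:
    status = 'closed'
if status != last_status:
    logg.debug(f'Door status changed: {last_status} -> {status}')
    ha.set_state(PiGarage.ha_garage_door_sensor, data={'state': status})
logg.close()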
whil_url = "https://connect.whil.com/virginpulsesso/redirect?destination=home"
ba.get(whil_url)
ba.medium_wait()

# Play a specific session
body_sense_url = "https://connect.whil.com/goaltags/thrive-mindfulness-101/sessions/sense-the-body"
ba.get(body_sense_url)
ba.medium_wait()
ba.click('//*/img[@alt="play"]')
# Wait five-ish mins
time.sleep(310)

message_channel_and_log('Vpulse script booted up')

today = dtt.today()

ba = BrowserAction(logg.log_name, 'chrome', headless=not debug)
logg.debug('Chrome instantiated.')
vpulse_home_url = 'https://member.virginpulse.com/'
points_url = 'https://app.member.virginpulse.com/#/rewards/earn'
ba.get(vpulse_home_url)
ba.slow_wait()

ba.enter('//input[@id="username"]', creds['user'])
ba.enter('//input[@id="password"]', creds['password'])
ba.click('//input[@id="kc-login"]')
ba.medium_wait()

# Look for a security check
sec_form = ba.elem_exists('//input[@value="Send code"]')
if sec_form:
    message_channel_and_log(
        f'<@{user_me}>, Security code was requested. '
class EdgarCollector:
    """Handles the entire process of collecting SEC filing data
    and processing that into financial ratios"""
    base_url = 'https://www.sec.gov'

    def __init__(self, start_year: int = 2017, parent_log: LogWithInflux = None):
        self.logg = LogWithInflux(parent_log, child_name=self.__class__.__name__)
        # Set temp dir for downloading the edgar filings
        self.tmp_dir = os.path.join(tempfile.gettempdir(), 'edgar')
        # Get ticker to CIK mapping
        self.logg.debug('Downloading ticker to CIK mapping...')
        self.t2cik_df = self.get_ticker_to_cik_map()
        # Download EDGAR indexes and retrieve the filepaths associated with them
        self.logg.debug('Downloading indexes (this may take ~2 mins)...')
        self.edgar_fpaths = self._download_indexes(start_year)

    @staticmethod
    def get_ticker_to_cik_map() -> pd.DataFrame:
        """Collects the mapping of stock ticker to CIK"""
        t2cik_df = pd.read_csv('https://www.sec.gov/include/ticker.txt',
                               sep='\t', header=None)
        t2cik_df.columns = ['tick', 'cik']
        return t2cik_df

    def _download_indexes(self, since_year: int) -> List[str]:
        """Downloads company indexes from EDGAR into a temporary directory"""
        # Begin download
        edgar.download_index(self.tmp_dir, since_year=since_year)
        # Retrieve the file paths downloaded (top level of the temp dir only)
        fpaths = []
        for _, _, fnames in os.walk(self.tmp_dir):
            fpaths = [os.path.join(self.tmp_dir, f) for f in fnames]
            break
        self.logg.debug(f'Collected {len(fpaths)} files...')
        return sorted(fpaths)

    def get_data_for_stock_tickers(self, tickers: Union[str, List[str]]) -> pd.DataFrame:
        """Collects the quarterly financial ratios for the given stock ticker(s)"""
        if isinstance(tickers, str):
            tickers = [tickers]
        stocks_df = pd.DataFrame({'tick': list(map(str.lower, tickers))})
        # Merge stocks on cik
        stocks_df = stocks_df.merge(self.t2cik_df, how='left', on='tick')

        ratio_df = pd.DataFrame()
        # Capture filing locations for each quarter for each stock
        for i, fpath in enumerate(sorted(self.edgar_fpaths)):
            self.logg.debug(f'Working on file {i + 1} of {len(self.edgar_fpaths)}')
            # Parse the quarter
            year, qtr = os.path.split(fpath)[1].split('.')[0].split('-')
            self.logg.debug(f'Parsed: {qtr}-{year}')
            df = pd.read_csv(fpath, sep='|', header=None)
            df.columns = ['cik', 'company_name', 'form', 'filing_date',
                          'txt_file', 'html_file']
            # Filter by CIK and on either 10-K (annual) or 10-Q (quarterly) reports
            df = df.loc[df.cik.isin(stocks_df.cik) & df.form.isin(['10-Q', '10-K'])]
            self.logg.debug(f'Matched {df.shape[0]} rows of data')
            for idx, row in df.iterrows():
                # Grab the HTML file - this is the file list
                url = f'{self.base_url}/Archives/{row["html_file"]}'
                xpe = XPathExtractor(url)
                # Grab the link to the 10-* file
                form_type = row['form']
                form_row = xpe.xpath(
                    f'//table[@class="tableFile"]/tr[td[text()="{form_type}"]]',
                    single=True)
                form_url = xpe.xpath('./td/a', obj=form_row, single=True).get('href')
                # Grab the 10-* file data
                complete_url = f'{self.base_url}{form_url}'
                # Work on the statement of operations (this sets a point to look 'after')
                stmt_ops = EdgarFinStatement(complete_url, 'STATEMENTS? OF OPERATIONS')
                net_sales = stmt_ops.get_line_item('net sales')
                cogs = stmt_ops.get_line_item('cost of sales')
                op_income = stmt_ops.get_line_item('operating income')
                net_income = stmt_ops.get_line_item('net income')
                eps = stmt_ops.get_line_item('earnings per share', line_after=True)
                shares = stmt_ops.get_line_item('shares used in computing earnings',
                                                line_after=True)
                # Work on the balance sheet
                stmt_bs = EdgarFinStatement(complete_url, 'CONSOLIDATED BALANCE SHEETS')
                current_assets = stmt_bs.get_line_item('total current assets')
                intangible_assets = stmt_bs.get_line_item('intangible assets')
                total_assets = stmt_bs.get_line_item('total assets')
                current_liabilities = stmt_bs.get_line_item('total current liabilities')
                total_liabilities = stmt_bs.get_line_item('total liabilities')
                common_stock_eq = stmt_bs.get_line_item('common stock')
                total_shareholders_equity = stmt_bs.get_line_item('total shareholders')
                net_tangible_assets = (total_assets - intangible_assets
                                       - total_liabilities)
                # Work on the statement of cash flows (SCF)
                stmt_cf = EdgarFinStatement(complete_url, 'STATEMENTS OF CASH FLOWS')
                dep_and_amort = stmt_cf.get_line_item('depreciation and amortization')
                ebitda = op_income + dep_and_amort
                # Ratios
                asset_turnover = net_sales / net_tangible_assets
                profit_margin = net_income / net_sales
                debt_to_ebitda = total_liabilities / ebitda
                debt_to_equity = total_liabilities / total_shareholders_equity
                roa = net_income / total_assets
                roe = net_income / total_shareholders_equity
                bvps = total_shareholders_equity / shares
                # Apply ratios to dataframe
                ratio_df = ratio_df.append({
                    'year': year,
                    'quarter': qtr,
                    'cik': row['cik'],
                    'source': row['form'],
                    'ato': asset_turnover,
                    'pm': profit_margin,
                    'd2ebitda': debt_to_ebitda,
                    'd2e': debt_to_equity,
                    'roa': roa,
                    'roe': roe,
                    'bvps': bvps
                }, ignore_index=True)
        # Merge the financial info back in with the stocks
        stocks_df = stocks_df.merge(ratio_df, how='left', on='cik')
        return stocks_df
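# A brief usage sketch of the collector above (the tickers are illustrative):
collector = EdgarCollector(start_year=2018)
ratios = collector.get_data_for_stock_tickers(['aapl', 'msft'])
print(ratios[['tick', 'year', 'quarter', 'roe', 'd2e']].head())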
log = LogWithInflux('timelapse_combi')
p = Path()
tl_dir = p.easy_joiner(p.data_dir, 'timelapse')
fnames = {}
for dirpath, _, filenames in os.walk(tl_dir):
    dirname = os.path.basename(dirpath)
    if dirname != 'timelapse':
        fnames[os.path.basename(dirpath)] = filenames

files = []
# Begin combining shots
for k, v in fnames.items():
    if not any([k.startswith(x) for x in ['ac-', 're-']]):
        continue
    log.debug(f'Working on {k}. {len(v)} files.')
    full_paths = sorted([os.path.join(tl_dir, *[k, x]) for x in v])
    clips = []
    for fpath in full_paths:
        try:
            clips.append(ImageClip(fpath).set_duration(1))
        except ValueError:
            log.debug(f'Error with this path: {fpath}')
            continue
    clip = concatenate_videoclips(clips)
    clip = clip.set_fps(30).speedx(30)
    fpath = os.path.join(tl_dir, f'concat_{k}.mp4')
    clip.write_videofile(fpath, fps=30)
    files.append(fpath)

scom = SlackComm(parent_log=log)
    , COUNT(DOMAIN) AS query_cnt
FROM
    queries
WHERE
    domain != ''
    AND timestamp BETWEEN '{unix_start}' AND '{unix_end}'
GROUP BY
    timestamp
    , client
    , domain
    , query_status
    , status_info
"""
df = sqll.read_sql(query)
logg.debug(f'Returned {df.shape[0]} rows of data.')
# Convert unix back to dt
df['timestamp'] = df['timestamp'].apply(
    lambda x: datetools.unix_to_dt(x, to_tz='US/Central'))
# Look up all known clients
df['client'] = df['client'].replace(hosts)

# Feed data into influx
influx = InfluxDBLocal(InfluxDBPiHole.QUERIES)
logg.debug('Writing dataframe to influx table.')
influx.write_df_to_table(
    df, tags=['client', 'domain', 'query_status', 'status_info'],
    value_cols=['query_cnt'], time_col='timestamp')
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Log memory, cpu use and temp of each machine"""
from kavalkilu import LogWithInflux, InfluxDBHomeAuto
from pitools import Sensor

logg = LogWithInflux('machine_data')

# Take readings from each machine-level sensor & log to db
for sensor_name in ['CPU', 'MEM', 'CPUTEMP', 'DISK']:
    logg.debug(f'Logging {sensor_name}...')
    sensor = Sensor(sensor_name)
    sensor.measure_and_log_to_db(
        tbl=InfluxDBHomeAuto().__getattribute__(sensor_name), n_times=2)

logg.debug('Machine data logging successfully completed.')
logg.close()
# Initiate Log, including a suffix to the log name to denote which instance of log is running
log = LogWithInflux('cam_active')
ow = OpenWRT()
hosts = Hosts()
# Get only cameras without numbers (or 'doorbell') in the name
cam_info_list = hosts.get_hosts_and_ips(r'(?!^ac-.*(\d.*|doorbell)$)^ac-.+$')

res_list = []
currently_active_ips = ow.get_active_connections()
# Check if mobile(s) are connected to LAN
for ip in [i['ip'] for i in hosts.get_hosts_and_ips('an-[bm]a.*')]:
    res_list.append(ip in currently_active_ips.keys())

# If anyone's home, don't arm; otherwise arm
arm_cameras = not any(res_list)
arm_status = 'ARMED' if arm_cameras else 'UNARMED'
if not arm_cameras:
    log.debug('At least one device is currently on the network. '
              'Disabling motion detection.')
else:
    log.debug('No devices are currently on the network. '
              'Enabling motion detection.')

for cam_dict in cam_info_list:
    cam = Amcrest(cam_dict['ip'])
    if cam.camera_type != 'doorbell':
        cam.arm_camera(arm_cameras)

log.close()
    }
}]
ap = ArgParse(args, parse_all=False)
CAMERA = ap.arg_dict.get('camera')
INTERVAL_MINS = int(ap.arg_dict.get('interval'))
start_dt = (dt.now() - timedelta(minutes=INTERVAL_MINS)).replace(second=0, microsecond=0)
end_dt = (start_dt + timedelta(minutes=INTERVAL_MINS))

cam_ip = Hosts().get_ip_from_host(CAMERA)
cam = Reolink(cam_ip, parent_log=logg)
stream = 'sub'
# Get dimensions of substream
dims = cam.get_dimensions(stream)
logg.debug(f'Video dimensions set to {dims[0]}x{dims[1]}')
vt = VidTools(*dims, resize_perc=1, speed_x=6)

temp_dir = tempfile.gettempdir()
motion_files = cam.get_motion_files(start=start_dt, end=end_dt, streamtype=stream)
logg.info(f'Found {len(motion_files)} motion events for the range selected.')

already_downloaded = []
processed_files = []
durations = []
last_motion_end = None
for mlog in motion_files:
    start = mlog['start']
    end = mlog['end']